Example 1
def _RunInstrumentCommand(_command, options, _, option_parser):
    """Instruments jar files using EMMA.

  Args:
    command: String indicating the command that was received to trigger
        this function.
    options: optparse options dictionary.
    args: List of extra args from optparse.
    option_parser: optparse.OptionParser object.

  Returns:
    An exit code.
  """
    if not (options.input_path and options.output_path
            and options.coverage_file and options.sources_list_file and
            (options.source_files or options.source_dirs
             or options.java_sources_file) and options.src_root
            and options.emma_jar):
        option_parser.error('All arguments are required.')

    if os.path.exists(options.coverage_file):
        os.remove(options.coverage_file)
    temp_dir = tempfile.mkdtemp()
    try:
        cmd = [
            'java', '-cp', options.emma_jar, 'emma', 'instr', '-ip',
            options.input_path, '-ix', options.filter_string, '-d', temp_dir,
            '-out', options.coverage_file, '-m', 'fullcopy'
        ]
        build_utils.CheckOutput(cmd)

        # File is not generated when filter_string doesn't match any files.
        if not os.path.exists(options.coverage_file):
            build_utils.Touch(options.coverage_file)

        temp_jar_dir = os.path.join(temp_dir, 'lib')
        jars = os.listdir(temp_jar_dir)
        if len(jars) != 1:
            print('Error: multiple output files in: %s' % (temp_jar_dir))
            return 1

        # Delete output_path first to avoid modifying input_path in the case where
        # input_path is a hardlink to output_path. http://crbug.com/571642
        if os.path.exists(options.output_path):
            os.unlink(options.output_path)
        shutil.move(os.path.join(temp_jar_dir, jars[0]), options.output_path)
    finally:
        shutil.rmtree(temp_dir)

    if options.source_dirs:
        source_dirs = build_utils.ParseGnList(options.source_dirs)
    else:
        source_files = []
        if options.source_files:
            source_files += build_utils.ParseGnList(options.source_files)
        if options.java_sources_file:
            source_files.extend(
                build_utils.ReadSourcesList(options.java_sources_file))
        source_dirs = _GetSourceDirsFromSourceFiles(source_files)

    # TODO(GYP): In GN, we are passed the list of sources, detecting source
    # directories, then walking them to re-establish the list of sources.
    # This can obviously be simplified!
    _CreateSourcesListFile(source_dirs, options.sources_list_file,
                           options.src_root)

    if options.stamp:
        build_utils.Touch(options.stamp)

    if options.depfile:
        build_utils.WriteDepfile(options.depfile,
                                 build_utils.GetPythonDependencies())

    return 0
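The crbug.com/571642 comment above describes a reusable pattern: when the destination might be a hardlink to the source, overwriting it in place would also clobber the input, so the destination is unlinked before the move. A minimal stdlib sketch of that pattern (the helper name is illustrative, not part of the build script):

import os
import shutil


def replace_file_safely(src, dest):
    # If dest is a hardlink to src, unlinking dest only drops one link;
    # src stays intact and the move then creates a fresh, independent dest.
    if os.path.exists(dest):
        os.unlink(dest)
    shutil.move(src, dest)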
Example 2
def main():
    args = build_utils.ExpandFileArgs(sys.argv[1:])

    options = ParseArgs(args)
    android_jar = os.path.join(options.android_sdk, 'android.jar')
    aapt = os.path.join(options.android_sdk_tools, 'aapt')

    input_files = []

    with build_utils.TempDir() as temp_dir:
        deps_dir = os.path.join(temp_dir, 'deps')
        build_utils.MakeDirectory(deps_dir)
        v14_dir = os.path.join(temp_dir, 'v14')
        build_utils.MakeDirectory(v14_dir)

        gen_dir = os.path.join(temp_dir, 'gen')
        build_utils.MakeDirectory(gen_dir)

        input_resource_dirs = build_utils.ParseGypList(options.resource_dirs)

        if not options.v14_skip:
            for resource_dir in input_resource_dirs:
                generate_v14_compatible_resources.GenerateV14Resources(
                    resource_dir, v14_dir, options.v14_verify_only)

        dep_zips = build_utils.ParseGypList(options.dependencies_res_zips)
        input_files += dep_zips
        dep_subdirs = []
        for z in dep_zips:
            subdir = os.path.join(deps_dir, os.path.basename(z))
            if os.path.exists(subdir):
                raise Exception('Resource zip name conflict: ' +
                                os.path.basename(z))
            build_utils.ExtractAll(z, path=subdir)
            dep_subdirs.append(subdir)

        # Generate R.java. This R.java contains non-final constants and is used only
        # while compiling the library jar (e.g. chromium_content.jar). When building
        # an apk, a new R.java file with the correct resource -> ID mappings will be
        # generated by merging the resources from all libraries and the main apk
        # project.
        package_command = [
            aapt, 'package', '-m', '-M', options.android_manifest,
            '--auto-add-overlay', '-I', android_jar, '--output-text-symbols',
            gen_dir, '-J', gen_dir, '--ignore-assets',
            build_utils.AAPT_IGNORE_PATTERN
        ]

        for d in input_resource_dirs:
            package_command += ['-S', d]

        for d in dep_subdirs:
            package_command += ['-S', d]

        if options.non_constant_id:
            package_command.append('--non-constant-id')
        if options.custom_package:
            package_command += ['--custom-package', options.custom_package]
        if options.proguard_file:
            package_command += ['-G', options.proguard_file]
        if options.shared_resources:
            package_command.append('--shared-lib')
        build_utils.CheckOutput(package_command, print_stderr=False)

        if options.extra_res_packages:
            CreateExtraRJavaFiles(
                gen_dir, build_utils.ParseGypList(options.extra_res_packages),
                build_utils.ParseGypList(options.extra_r_text_files),
                options.shared_resources, options.include_all_resources)

        # This is the list of directories with resources to put in the final .zip
        # file. The order of these is important so that crunched/v14 resources
        # override the normal ones.
        zip_resource_dirs = input_resource_dirs + [v14_dir]

        base_crunch_dir = os.path.join(temp_dir, 'crunch')

        # Crunch image resources. This shrinks png files and is necessary for
        # 9-patch images to display correctly. 'aapt crunch' accepts only a single
        # directory at a time and deletes everything in the output directory.
        for idx, input_dir in enumerate(input_resource_dirs):
            crunch_dir = os.path.join(base_crunch_dir, str(idx))
            build_utils.MakeDirectory(crunch_dir)
            zip_resource_dirs.append(crunch_dir)
            CrunchDirectory(aapt, input_dir, crunch_dir)

        ZipResources(zip_resource_dirs, options.resource_zip_out)

        if options.all_resources_zip_out:
            CombineZips([options.resource_zip_out] + dep_zips,
                        options.all_resources_zip_out)

        if options.R_dir:
            build_utils.DeleteDirectory(options.R_dir)
            shutil.copytree(gen_dir, options.R_dir)
        else:
            build_utils.ZipDir(options.srcjar_out, gen_dir)

        if options.r_text_out:
            r_text_path = os.path.join(gen_dir, 'R.txt')
            if os.path.exists(r_text_path):
                shutil.copyfile(r_text_path, options.r_text_out)
            else:
                open(options.r_text_out, 'w').close()

    if options.depfile:
        input_files += build_utils.GetPythonDependencies()
        build_utils.WriteDepfile(options.depfile, input_files)

    if options.stamp:
        build_utils.Touch(options.stamp)
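The dependency handling above assumes each resource zip has a unique basename so it can be unpacked into its own subdirectory; a collision is treated as a hard error. A rough stdlib-only sketch of that step, assuming build_utils.ExtractAll behaves roughly like zipfile extraction (the function name here is illustrative):

import os
import zipfile


def extract_dep_zips(dep_zips, deps_dir):
    dep_subdirs = []
    for z in dep_zips:
        subdir = os.path.join(deps_dir, os.path.basename(z))
        if os.path.exists(subdir):
            # Two dependencies with the same zip name would overwrite each
            # other's resources, so fail loudly instead.
            raise Exception('Resource zip name conflict: ' + os.path.basename(z))
        os.makedirs(subdir)
        with zipfile.ZipFile(z) as zf:
            zf.extractall(subdir)
        dep_subdirs.append(subdir)
    return dep_subdirs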
Example 3
def main(args):
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  split_dimensions = []
  if options.split_dimensions:
    split_dimensions = [x.upper() for x in options.split_dimensions]


  with build_utils.TempDir() as tmp_dir:
    module_zips = [
        _SplitModuleForAssetTargeting(module, tmp_dir, split_dimensions) \
        for module in options.module_zips]

    base_master_resource_ids = None
    if options.base_module_rtxt_path:
      base_master_resource_ids = _GenerateBaseResourcesWhitelist(
          options.base_module_rtxt_path, options.base_whitelist_rtxt_path)

    bundle_config = _GenerateBundleConfigJson(
        options.uncompressed_assets, options.compress_shared_libraries,
        split_dimensions, base_master_resource_ids)

    tmp_bundle = os.path.join(tmp_dir, 'tmp_bundle')

    tmp_unsigned_bundle = tmp_bundle
    if options.keystore_path:
      tmp_unsigned_bundle = tmp_bundle + '.unsigned'

    # Important: bundletool requires that the bundle config file is
    # named with a .pb.json extension.
    tmp_bundle_config = tmp_bundle + '.BundleConfig.pb.json'

    with open(tmp_bundle_config, 'w') as f:
      f.write(bundle_config)

    cmd_args = ['java', '-jar', bundletool.BUNDLETOOL_JAR_PATH, 'build-bundle']
    cmd_args += ['--modules=%s' % ','.join(module_zips)]
    cmd_args += ['--output=%s' % tmp_unsigned_bundle]
    cmd_args += ['--config=%s' % tmp_bundle_config]

    print(' '.join(cmd_args))
    build_utils.CheckOutput(cmd_args, print_stdout=True, print_stderr=True)

    if options.keystore_path:
      # NOTE: As stated by the public documentation, apksigner cannot be used
      # to sign the bundle (because it rejects anything that isn't an APK).
      # The signature and digest algorithm selection come from the internal
      # App Bundle documentation. There is no corresponding public doc :-(
      signing_cmd_args = [
          'jarsigner', '-sigalg', 'SHA256withRSA', '-digestalg', 'SHA-256',
          '-keystore', 'file:' + options.keystore_path,
          '-storepass', options.keystore_password,
          '-signedjar', tmp_bundle,
          tmp_unsigned_bundle,
          options.key_name,
      ]
      build_utils.CheckOutput(signing_cmd_args, print_stderr=True)

    shutil.move(tmp_bundle, options.out_bundle)

  if options.rtxt_out_path:
    _ConcatTextFiles(options.rtxt_in_paths, options.rtxt_out_path)

  if options.pathmap_out_path:
    _WriteBundlePathmap(options.pathmap_in_paths, options.module_names,
                        options.pathmap_out_path)
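The bundling step above reduces to two moves: write the generated config next to the bundle using the .pb.json suffix that bundletool insists on, then invoke bundletool's build-bundle command. A trimmed sketch of just that core, assuming the path to the bundletool jar is known (the helper name and direct subprocess usage are illustrative):

import subprocess


def build_bundle(bundletool_jar, module_zips, bundle_config_json, out_bundle):
    # bundletool requires the config file name to end with .pb.json.
    config_path = out_bundle + '.BundleConfig.pb.json'
    with open(config_path, 'w') as f:
        f.write(bundle_config_json)
    cmd = [
        'java', '-jar', bundletool_jar, 'build-bundle',
        '--modules=%s' % ','.join(module_zips),
        '--output=%s' % out_bundle,
        '--config=%s' % config_path,
    ]
    subprocess.check_call(cmd)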
Example 4
def _RunD8(dex_cmd, input_paths, output_path):
    dex_cmd += ['--output', output_path]
    dex_cmd += input_paths
    build_utils.CheckOutput(dex_cmd, print_stderr=False)
Example 5
def PackLibraryRelocations(android_pack_relocations, library_path, output_path):
  shutil.copy(library_path, output_path)
  pack_command = [android_pack_relocations, output_path]
  build_utils.CheckOutput(pack_command)
Example 6
def CallReadElf(library_or_executable):
  assert _readelf is not None
  readelf_cmd = [_readelf,
                 '-d',
                 FullLibraryPath(library_or_executable)]
  return build_utils.CheckOutput(readelf_cmd)
Example 7
def _RunLint(lint_binary_path,
             config_path,
             manifest_path,
             sources,
             classpath,
             cache_dir,
             android_sdk_version,
             srcjars,
             min_sdk_version,
             resource_sources,
             resource_zips,
             android_sdk_root,
             lint_gen_dir,
             baseline,
             testonly_target=False,
             warnings_as_errors=False,
             silent=False):
    logging.info('Lint starting')

    cmd = [
        _SrcRelative(lint_binary_path),
        # Consider all lint warnings as errors. Warnings should either always be
        # fixed or completely suppressed in suppressions.xml. They should not
        # bloat build output if they are not important enough to be fixed.
        '-Werror',
        '--exitcode',  # Sets error code if there are errors.
        '--quiet',  # Silences lint's "." progress updates.
    ]
    if baseline:
        cmd.extend(['--baseline', _SrcRelative(baseline)])
    if config_path:
        cmd.extend(['--config', _SrcRelative(config_path)])
    if testonly_target:
        cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)])

    if not manifest_path:
        manifest_path = os.path.join(build_utils.DIR_SOURCE_ROOT, 'build',
                                     'android', 'AndroidManifest.xml')

    logging.info('Generating Android manifest file')
    android_manifest_tree = _GenerateAndroidManifest(manifest_path,
                                                     min_sdk_version)
    # Include the rebased manifest_path in the lint generated path so that it is
    # clear in error messages where the original AndroidManifest.xml came from.
    lint_android_manifest_path = os.path.join(lint_gen_dir,
                                              _SrcRelative(manifest_path))
    logging.info('Writing xml file %s', lint_android_manifest_path)
    _WriteXmlFile(android_manifest_tree.getroot(), lint_android_manifest_path)

    resource_root_dir = os.path.join(lint_gen_dir, _RES_ZIP_DIR)
    # These are zip files with generated resources (e.g. strings from GRD).
    logging.info('Extracting resource zips')
    for resource_zip in resource_zips:
        # Use a consistent root and name rather than a temporary file so that
        # suppressions can be local to the lint target and the resource target.
        resource_dir = os.path.join(resource_root_dir, resource_zip)
        shutil.rmtree(resource_dir, True)
        os.makedirs(resource_dir)
        resource_sources.extend(
            build_utils.ExtractAll(resource_zip, path=resource_dir))

    logging.info('Extracting srcjars')
    srcjar_root_dir = os.path.join(lint_gen_dir, _SRCJAR_DIR)
    srcjar_sources = []
    if srcjars:
        for srcjar in srcjars:
            # Use path without extensions since otherwise the file name includes
            # .srcjar and lint treats it as a srcjar.
            srcjar_dir = os.path.join(srcjar_root_dir,
                                      os.path.splitext(srcjar)[0])
            shutil.rmtree(srcjar_dir, True)
            os.makedirs(srcjar_dir)
            # Sadly lint's srcjar support is broken since it only considers the first
            # srcjar. Until we roll a lint version with that fixed, we need to extract
            # it ourselves.
            srcjar_sources.extend(
                build_utils.ExtractAll(srcjar, path=srcjar_dir))

    logging.info('Generating project file')
    project_file_root = _GenerateProjectFile(lint_android_manifest_path,
                                             android_sdk_root, cache_dir,
                                             sources, classpath,
                                             srcjar_sources, resource_sources,
                                             android_sdk_version)

    project_xml_path = os.path.join(lint_gen_dir, 'project.xml')
    logging.info('Writing xml file %s', project_xml_path)
    _WriteXmlFile(project_file_root, project_xml_path)
    cmd += ['--project', _SrcRelative(project_xml_path)]

    logging.info('Preparing environment variables')
    env = os.environ.copy()
    # It is important that lint uses the checked-in JDK11 as it is almost 50%
    # faster than JDK8.
    env['JAVA_HOME'] = os.path.relpath(build_utils.JAVA_HOME,
                                       build_utils.DIR_SOURCE_ROOT)
    # This filter is necessary for JDK11.
    stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings

    try:
        logging.debug('Lint command %s', cmd)
        start = time.time()
        # Lint outputs "No issues found" if it succeeds, and uses stderr when it
        # fails, so we can safely ignore stdout.
        build_utils.CheckOutput(cmd,
                                cwd=build_utils.DIR_SOURCE_ROOT,
                                env=env,
                                stderr_filter=stderr_filter)
        end = time.time() - start
        logging.info('Lint command took %ss', end)
    except build_utils.CalledProcessError as e:
        if not silent:
            print(
                'Lint found new issues.\n'
                ' - Here is the project.xml file passed to lint: {}\n'
                ' - For more information about lint and how to fix lint issues,'
                ' please refer to {}\n'.format(_SrcRelative(project_xml_path),
                                               _LINT_MD_URL))
            if warnings_as_errors:
                raise
            else:
                print(e)
    else:
        # Lint succeeded, no need to keep generated files for debugging purposes.
        shutil.rmtree(resource_root_dir, ignore_errors=True)
        shutil.rmtree(srcjar_root_dir, ignore_errors=True)

    logging.info('Lint completed')
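The stderr filter above (build_utils.FilterReflectiveAccessJavaWarnings) exists to keep JDK11 reflective-access noise out of lint's output; its real implementation is not shown here. A plausible stdlib-only stand-in looks roughly like this, where the exact warning texts matched are an assumption rather than the actual filter:

import re

# Assumed shape of the JDK warning block being dropped; the real
# build_utils helper may match slightly different text.
_REFLECTIVE_ACCESS_RE = re.compile(
    r'^WARNING: (An illegal reflective access|Illegal reflective access'
    r'|Please consider reporting|Use --illegal-access'
    r'|All illegal access operations).*\n?', re.MULTILINE)


def filter_reflective_access_warnings(stderr_text):
    return _REFLECTIVE_ACCESS_RE.sub('', stderr_text)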
Example 8
def _OptimizeWithR8(options,
                    config_paths,
                    libraries,
                    dynamic_config_data,
                    print_stdout=False):
    with build_utils.TempDir() as tmp_dir:
        if dynamic_config_data:
            tmp_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
            with open(tmp_config_path, 'w') as f:
                f.write(dynamic_config_data)
            config_paths = config_paths + [tmp_config_path]

        tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
        # If there is no output (no classes are kept), this prevents this script
        # from failing.
        build_utils.Touch(tmp_mapping_path)

        tmp_output = os.path.join(tmp_dir, 'r8out')
        os.mkdir(tmp_output)

        feature_contexts = []
        if options.feature_names:
            for name, dest_dex, input_paths in zip(options.feature_names,
                                                   options.dex_dests,
                                                   options.feature_jars):
                feature_context = _DexPathContext(name, dest_dex, input_paths,
                                                  tmp_output)
                if name == 'base':
                    base_dex_context = feature_context
                else:
                    feature_contexts.append(feature_context)
        else:
            base_dex_context = _DexPathContext('base', options.output_path,
                                               options.input_paths, tmp_output)

        cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
            '-Dcom.android.tools.r8.allowTestProguardOptions=1',
            '-Dcom.android.tools.r8.verticalClassMerging=1',
        ]
        if options.disable_outlining:
            cmd += ['-Dcom.android.tools.r8.disableOutlining=1']
        if options.dump_inputs:
            cmd += ['-Dcom.android.tools.r8.dumpinputtofile=r8inputs.zip']
        cmd += [
            '-cp',
            options.r8_path,
            'com.android.tools.r8.R8',
            '--no-data-resources',
            '--output',
            base_dex_context.staging_dir,
            '--pg-map-output',
            tmp_mapping_path,
        ]

        if options.disable_checks:
            # Info level priority logs are not printed by default.
            cmd += [
                '--map-diagnostics:CheckDiscardDiagnostic', 'error', 'info'
            ]

        if options.desugar_jdk_libs_json:
            cmd += [
                '--desugared-lib',
                options.desugar_jdk_libs_json,
                '--desugared-lib-pg-conf-output',
                options.desugared_library_keep_rule_output,
            ]

        if options.min_api:
            cmd += ['--min-api', options.min_api]

        if options.force_enable_assertions:
            cmd += ['--force-enable-assertions']

        for lib in libraries:
            cmd += ['--lib', lib]

        for config_file in config_paths:
            cmd += ['--pg-conf', config_file]

        if options.main_dex_rules_path:
            for main_dex_rule in options.main_dex_rules_path:
                cmd += ['--main-dex-rules', main_dex_rule]

        base_jars = set(base_dex_context.input_paths)
        input_path_map = defaultdict(set)
        for feature in feature_contexts:
            parent = options.uses_split.get(feature.name, feature.name)
            input_path_map[parent].update(feature.input_paths)

        # If a jar is present in multiple features, it should be moved to the base
        # module.
        all_feature_jars = set()
        for input_paths in input_path_map.values():
            base_jars.update(all_feature_jars.intersection(input_paths))
            all_feature_jars.update(input_paths)

        module_input_jars = base_jars.copy()
        for feature in feature_contexts:
            input_paths = input_path_map.get(feature.name)
            # Input paths can be missing for a child feature present in the uses_split
            # map. These features get their input paths added to the parent, and are
            # split out later with DexSplitter.
            if input_paths is None:
                continue
            feature_input_jars = [
                p for p in input_paths if p not in module_input_jars
            ]
            module_input_jars.update(feature_input_jars)
            for in_jar in feature_input_jars:
                cmd += ['--feature', in_jar, feature.staging_dir]

        cmd += sorted(base_jars)
        # Add any extra input jars to the base module (e.g. desugar runtime).
        extra_jars = set(options.input_paths) - module_input_jars
        cmd += sorted(extra_jars)

        try:
            stderr_filter = dex.CreateStderrFilter(
                options.show_desugar_default_interface_warnings)
            logging.debug('Running R8')
            build_utils.CheckOutput(cmd,
                                    print_stdout=print_stdout,
                                    stderr_filter=stderr_filter,
                                    fail_on_output=options.warnings_as_errors)
        except build_utils.CalledProcessError as err:
            debugging_link = ('\n\nR8 failed. Please see {}.'.format(
                'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
                'android/docs/java_optimization.md#Debugging-common-failures\n'
            ))
            raise build_utils.CalledProcessError(err.cwd, err.args,
                                                 err.output + debugging_link)

        base_has_imported_lib = False
        if options.desugar_jdk_libs_json:
            logging.debug('Running L8')
            existing_files = build_utils.FindInDirectory(
                base_dex_context.staging_dir)
            jdk_dex_output = os.path.join(
                base_dex_context.staging_dir,
                'classes%d.dex' % (len(existing_files) + 1))
            base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
                options.r8_path, options.min_api,
                options.desugar_jdk_libs_json, options.desugar_jdk_libs_jar,
                options.desugar_jdk_libs_configuration_jar,
                options.desugared_library_keep_rule_output, jdk_dex_output,
                options.warnings_as_errors)
            if int(options.min_api) >= 24 and base_has_imported_lib:
                with open(jdk_dex_output, 'rb') as f:
                    dexfile = dex_parser.DexFile(bytearray(f.read()))
                    for m in dexfile.IterMethodSignatureParts():
                        print('{}#{}'.format(m[0], m[2]))
                assert False, (
                    'Desugared JDK libs are disabled on Monochrome and newer - see '
                    'crbug.com/1159984 for details, and see above list for desugared '
                    'classes and methods.')

        if options.uses_split:
            _SplitChildFeatures(options, feature_contexts, base_dex_context,
                                tmp_dir, tmp_mapping_path, print_stdout)

        logging.debug('Collecting outputs')
        base_dex_context.CreateOutput(
            base_has_imported_lib, options.desugared_library_keep_rule_output)
        for feature in feature_contexts:
            feature.CreateOutput()

        with open(options.mapping_output, 'w') as out_file, \
            open(tmp_mapping_path) as in_file:
            # Mapping files generated by R8 include comments that may break
            # some of our tooling so remove those (specifically: apkanalyzer).
            out_file.writelines(l for l in in_file if not l.startswith('#'))
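The feature-module bookkeeping above encodes one subtle rule: any jar that appears in more than one feature must be promoted to the base module so its classes exist exactly once. Pulled out of the R8 plumbing, it is just set arithmetic (function and parameter names are illustrative):

from collections import defaultdict


def assign_jars(base_jars, feature_jars_by_name, uses_split=None):
    # uses_split maps a child feature to the parent whose dex it shares.
    uses_split = uses_split or {}
    base_jars = set(base_jars)
    input_path_map = defaultdict(set)
    for name, jars in feature_jars_by_name.items():
        parent = uses_split.get(name, name)
        input_path_map[parent].update(jars)

    # A jar seen in two different features is promoted to the base module.
    seen = set()
    for jars in input_path_map.values():
        base_jars.update(seen.intersection(jars))
        seen.update(jars)
    return base_jars, input_path_map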
Example 9
def _CheckForMissingSymbols(r8_path,
                            dex_files,
                            classpath,
                            warnings_as_errors,
                            error_message=None):
    cmd = build_utils.JavaCmd(warnings_as_errors) + [
        '-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences',
        '--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning',
        '--check'
    ]

    for path in classpath:
        cmd += ['--lib', path]
    for path in dex_files:
        cmd += ['--source', path]

    def stderr_filter(stderr):
        ignored_lines = [
            # Summary contains warning count, which our filtering makes wrong.
            'Warning: Tracereferences found',

            # TODO(agrieve): Create interface jars for these missing classes rather
            #     than allowlisting here.
            'dalvik/system',
            'libcore/io',
            'sun/misc/Unsafe',

            # Found in: com/facebook/fbui/textlayoutbuilder/StaticLayoutHelper
            ('android/text/StaticLayout;<init>(Ljava/lang/CharSequence;IILandroid'
             '/text/TextPaint;ILandroid/text/Layout$Alignment;Landroid/text/'
             'TextDirectionHeuristic;FFZLandroid/text/TextUtils$TruncateAt;II)V'
             ),

            # Found in
            # com/google/android/gms/cast/framework/media/internal/ResourceProvider
            # Missing due to setting "strip_resources = true".
            'com/google/android/gms/cast/framework/R',

            # Found in com/google/android/gms/common/GoogleApiAvailability
            # Missing due to setting "strip_drawables = true".
            'com/google/android/gms/base/R$drawable',

            # Explicitly guarded by try (NoClassDefFoundError) in Flogger's
            # PlatformProvider.
            'com/google/common/flogger/backend/google/GooglePlatform',
            'com/google/common/flogger/backend/system/DefaultPlatform',

            # trichrome_webview_google_bundle contains this missing reference.
            # TODO(crbug.com/1142530): Fix this missing reference properly.
            'org/chromium/base/library_loader/NativeLibraries',

            # TODO(agrieve): Exclude these only when use_jacoco_coverage=true.
            'Ljava/lang/instrument/ClassFileTransformer',
            'Ljava/lang/instrument/IllegalClassFormatException',
            'Ljava/lang/instrument/Instrumentation',
            'Ljava/lang/management/ManagementFactory',
            'Ljavax/management/MBeanServer',
            'Ljavax/management/ObjectInstance',
            'Ljavax/management/ObjectName',
            'Ljavax/management/StandardMBean',
        ]

        had_unfiltered_items = '  ' in stderr
        stderr = build_utils.FilterLines(
            stderr, '|'.join(re.escape(x) for x in ignored_lines))
        if stderr:
            if '  ' in stderr:
                if error_message is None:
                    stderr = """
DEX contains references to non-existent symbols after R8 optimization.
Tip: Build with:
        is_java_debug=false
        treat_warnings_as_errors=false
        enable_proguard_obfuscation=false
     and then use dexdump to see which class(es) reference them.

     E.g.:
       third_party/android_sdk/public/build-tools/*/dexdump -d \
out/Release/apks/YourApk.apk > dex.txt
""" + stderr
                else:
                    stderr = error_message + stderr
            elif had_unfiltered_items:
                # Left only with empty headings. All indented items filtered out.
                stderr = ''
        return stderr

    logging.debug('cmd: %s', ' '.join(cmd))
    build_utils.CheckOutput(cmd,
                            print_stdout=True,
                            stderr_filter=stderr_filter,
                            fail_on_output=warnings_as_errors)
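The allowlist above is turned into a single regex and handed to build_utils.FilterLines, which drops every stderr line matching it. A stdlib approximation of that helper, assuming it filters line by line (the real signature and semantics may differ slightly):

import re


def filter_lines(text, joined_pattern):
    # joined_pattern is the '|'.join of re.escape'd allowlist entries, as
    # built above; keep only the lines that do not match it.
    regex = re.compile(joined_pattern)
    return ''.join(
        line for line in text.splitlines(True) if not regex.search(line))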
Example 10
def _OnStaleMd5(lint_path,
                config_path,
                processed_config_path,
                manifest_path,
                result_path,
                product_dir,
                sources,
                jar_path,
                cache_dir,
                android_sdk_version,
                srcjars,
                resource_sources,
                disable=None,
                classpath=None,
                can_fail_build=False,
                include_unexpected=False,
                silent=False):
    def _RebasePath(path):
        """Returns relative path to top-level src dir.

    Args:
      path: A path relative to cwd.
    """
        ret = os.path.relpath(os.path.abspath(path),
                              build_utils.DIR_SOURCE_ROOT)
        # If it's outside of src/, just use abspath.
        if ret.startswith('..'):
            ret = os.path.abspath(path)
        return ret

    def _ProcessConfigFile():
        if not config_path or not processed_config_path:
            return
        if not build_utils.IsTimeStale(processed_config_path, [config_path]):
            return

        with open(config_path, 'rb') as f:
            content = f.read().replace('PRODUCT_DIR', _RebasePath(product_dir))

        with open(processed_config_path, 'wb') as f:
            f.write(content)

    def _ProcessResultFile():
        with open(result_path, 'rb') as f:
            content = f.read().replace(_RebasePath(product_dir), 'PRODUCT_DIR')

        with open(result_path, 'wb') as f:
            f.write(content)

    def _ParseAndShowResultFile():
        dom = minidom.parse(result_path)
        issues = dom.getElementsByTagName('issue')
        if not silent:
            print >> sys.stderr
            for issue in issues:
                issue_id = issue.attributes['id'].value
                message = issue.attributes['message'].value
                location_elem = issue.getElementsByTagName('location')[0]
                path = location_elem.attributes['file'].value
                line = location_elem.getAttribute('line')
                if line:
                    error = '%s:%s %s: %s [warning]' % (path, line, message,
                                                        issue_id)
                else:
                    # Issues in class files don't have a line number.
                    error = '%s %s: %s [warning]' % (path, message, issue_id)
                print >> sys.stderr, error.encode('utf-8')
                for attr in ['errorLine1', 'errorLine2']:
                    error_line = issue.getAttribute(attr)
                    if error_line:
                        print >> sys.stderr, error_line.encode('utf-8')
        return len(issues)

    with build_utils.TempDir() as temp_dir:
        _ProcessConfigFile()

        cmd = [
            _RebasePath(lint_path),
            '-Werror',
            '--exitcode',
            '--showall',
            '--xml',
            _RebasePath(result_path),
        ]
        if jar_path:
            # --classpath is just for .class files for this one target.
            cmd.extend(['--classpath', _RebasePath(jar_path)])
        if processed_config_path:
            cmd.extend(['--config', _RebasePath(processed_config_path)])

        tmp_dir_counter = [0]

        def _NewTempSubdir(prefix, append_digit=True):
            # Helper function to create a new sub directory based on the number of
            # subdirs created earlier.
            if append_digit:
                tmp_dir_counter[0] += 1
                prefix += str(tmp_dir_counter[0])
            new_dir = os.path.join(temp_dir, prefix)
            os.makedirs(new_dir)
            return new_dir

        resource_dirs = []
        for resource_source in resource_sources:
            if os.path.isdir(resource_source):
                resource_dirs.append(resource_source)
            else:
                # This is a zip file with generated resources (e.g. strings from GRD).
                # Extract it to temporary folder.
                resource_dir = _NewTempSubdir(resource_source,
                                              append_digit=False)
                resource_dirs.append(resource_dir)
                build_utils.ExtractAll(resource_source, path=resource_dir)

        for resource_dir in resource_dirs:
            cmd.extend(['--resources', _RebasePath(resource_dir)])

        if classpath:
            # --libraries is the classpath (excluding active target).
            cp = ':'.join(_RebasePath(p) for p in classpath)
            cmd.extend(['--libraries', cp])

        # There may be multiple source files with the same basename (but in
        # different directories). It is difficult to determine what part of the path
        # corresponds to the java package, and so instead just link the source files
        # into temporary directories (creating a new one whenever there is a name
        # conflict).
        def PathInDir(d, src):
            subpath = os.path.join(d, _RebasePath(src))
            subdir = os.path.dirname(subpath)
            if not os.path.exists(subdir):
                os.makedirs(subdir)
            return subpath

        src_dirs = []
        for src in sources:
            src_dir = None
            for d in src_dirs:
                if not os.path.exists(PathInDir(d, src)):
                    src_dir = d
                    break
            if not src_dir:
                src_dir = _NewTempSubdir('SRC_ROOT')
                src_dirs.append(src_dir)
                cmd.extend(['--sources', _RebasePath(src_dir)])
            if os.path.abspath(src) != PathInDir(src_dir, src):
                os.symlink(os.path.abspath(src), PathInDir(src_dir, src))

        if srcjars:
            srcjar_paths = build_utils.ParseGnList(srcjars)
            if srcjar_paths:
                srcjar_dir = _NewTempSubdir('SRC_ROOT')
                cmd.extend(['--sources', _RebasePath(srcjar_dir)])
                for srcjar in srcjar_paths:
                    build_utils.ExtractAll(srcjar, path=srcjar_dir)

        if disable:
            cmd.extend(['--disable', ','.join(disable)])

        project_dir = _NewTempSubdir('SRC_ROOT')
        if android_sdk_version:
            # Create dummy project.properties file in a temporary "project" directory.
            # It is the only way to add Android SDK to the Lint's classpath. Proper
            # classpath is necessary for most source-level checks.
            with open(os.path.join(project_dir, 'project.properties'), 'w') \
                as propfile:
                print >> propfile, 'target=android-{}'.format(
                    android_sdk_version)

        # Put the manifest in a temporary directory in order to avoid lint detecting
        # sibling res/ and src/ directories (which should be passed explicitly if they
        # are to be included).
        if not manifest_path:
            manifest_path = os.path.join(build_utils.DIR_SOURCE_ROOT, 'build',
                                         'android', 'AndroidManifest.xml')
        os.symlink(os.path.abspath(manifest_path),
                   os.path.join(project_dir, 'AndroidManifest.xml'))
        cmd.append(project_dir)

        if os.path.exists(result_path):
            os.remove(result_path)

        env = os.environ.copy()
        stderr_filter = None
        if cache_dir:
            env['_JAVA_OPTIONS'] = '-Duser.home=%s' % _RebasePath(cache_dir)
            # When _JAVA_OPTIONS is set, java prints to stderr:
            # Picked up _JAVA_OPTIONS: ...
            #
            # We drop all lines that contain _JAVA_OPTIONS from the output
            stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)

        def fail_func(returncode, stderr):
            if returncode != 0:
                return True
            if (include_unexpected
                    and 'Unexpected failure during lint analysis' in stderr):
                return True
            return False

        try:
            build_utils.CheckOutput(cmd,
                                    cwd=build_utils.DIR_SOURCE_ROOT,
                                    env=env or None,
                                    stderr_filter=stderr_filter,
                                    fail_func=fail_func)
        except build_utils.CalledProcessError:
            # There is a problem with lint usage
            if not os.path.exists(result_path):
                raise

            # Sometimes lint produces (almost) empty files:
            if os.path.getsize(result_path) < 10:
                if can_fail_build:
                    raise
                elif not silent:
                    traceback.print_exc()
                return

            # There are actual lint issues
            try:
                num_issues = _ParseAndShowResultFile()
            except Exception:  # pylint: disable=broad-except
                if not silent:
                    print 'Lint created unparseable xml file...'
                    print 'File contents:'
                    with open(result_path) as f:
                        print f.read()
                    if can_fail_build:
                        traceback.print_exc()
                if can_fail_build:
                    raise
                else:
                    return

            _ProcessResultFile()
            if num_issues == 0 and include_unexpected:
                msg = 'Please refer to output above for unexpected lint failures.\n'
            else:
                msg = (
                    '\nLint found %d new issues.\n'
                    ' - For full explanation, please refer to %s\n'
                    ' - For more information about lint and how to fix lint issues,'
                    ' please refer to %s\n' %
                    (num_issues, _RebasePath(result_path), _LINT_MD_URL))
            if not silent:
                print >> sys.stderr, msg
            if can_fail_build:
                raise Exception('Lint failed.')
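The symlink logic above copes with source files that collide on their repo-relative paths: whenever a path already exists in every SRC_ROOT directory created so far, a new directory is started. Stripped of the lint specifics, the bucketing looks like this (helper names are illustrative; sources are assumed to be paths relative to one root):

import os


def bucket_sources(sources, make_new_dir):
    # make_new_dir() must return a fresh, empty directory on each call
    # (the script above uses numbered temp subdirectories for this).
    src_dirs = []
    for src in sources:
        target_dir = next(
            (d for d in src_dirs if not os.path.exists(os.path.join(d, src))),
            None)
        if target_dir is None:
            target_dir = make_new_dir()
            src_dirs.append(target_dir)
        dest = os.path.join(target_dir, src)
        os.makedirs(os.path.dirname(dest), exist_ok=True)
        os.symlink(os.path.abspath(src), dest)
    return src_dirs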
Example 11
def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs,
                runtime_classpath):
    with build_utils.TempDir() as temp_dir:
        srcjars = options.java_srcjars
        # The .excluded.jar contains .class files excluded from the main jar.
        # It is used for incremental compiles.
        excluded_jar_path = options.jar_path.replace('.jar', '.excluded.jar')

        classes_dir = os.path.join(temp_dir, 'classes')
        os.makedirs(classes_dir)

        changed_paths = None
        if options.incremental and changes.AddedOrModifiedOnly():
            changed_paths = set(changes.IterChangedPaths())
            # Do a full compile if classpath has changed.
            if any(p in changed_paths for p in classpath_inputs):
                changed_paths = None
            else:
                java_files = [p for p in java_files if p in changed_paths]
                srcjars = [p for p in srcjars if p in changed_paths]

        if srcjars:
            java_dir = os.path.join(temp_dir, 'java')
            os.makedirs(java_dir)
            for srcjar in options.java_srcjars:
                extract_predicate = None
                if changed_paths:
                    changed_subpaths = set(changes.IterChangedSubpaths(srcjar))
                    extract_predicate = lambda p: p in changed_subpaths
                build_utils.ExtractAll(srcjar,
                                       path=java_dir,
                                       pattern='*.java',
                                       predicate=extract_predicate)
            jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
            java_files.extend(
                _FilterJavaFiles(jar_srcs, options.javac_includes))

        if java_files:
            if changed_paths:
                # When no files have been removed and the output jar already
                # exists, reuse .class files from the existing jar.
                _ExtractClassFiles(options.jar_path, classes_dir, java_files)
                _ExtractClassFiles(excluded_jar_path, classes_dir, java_files)
                # Add the extracted files to the classpath.
                classpath_idx = javac_cmd.index('-classpath')
                javac_cmd[classpath_idx + 1] += ':' + classes_dir

            # Don't include the output directory in the initial set of args since it
            # being in a temp dir makes it unstable (breaks md5 stamping).
            cmd = javac_cmd + ['-d', classes_dir] + java_files

            build_utils.CheckOutput(cmd,
                                    print_stdout=options.chromium_code,
                                    stderr_filter=ColorJavacOutput)

        if options.main_class or options.manifest_entry:
            entries = []
            if options.manifest_entry:
                entries = [e.split(':') for e in options.manifest_entry]
            manifest_file = os.path.join(temp_dir, 'manifest')
            _CreateManifest(manifest_file, runtime_classpath,
                            options.main_class, entries)
        else:
            manifest_file = None

        glob = options.jar_excluded_classes
        inclusion_predicate = lambda f: not build_utils.MatchesGlob(f, glob)
        exclusion_predicate = lambda f: not inclusion_predicate(f)

        jar.JarDirectory(classes_dir,
                         options.jar_path,
                         manifest_file=manifest_file,
                         predicate=inclusion_predicate)
        jar.JarDirectory(classes_dir,
                         excluded_jar_path,
                         predicate=exclusion_predicate)
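The incremental path above comes down to one decision: if anything on the classpath changed (or change data is unavailable), do a full compile; otherwise restrict javac to just the changed .java files and srcjars. That decision in isolation (names are illustrative):

def plan_incremental_compile(changed_paths, classpath_inputs, java_files,
                             srcjars):
    # Returns the (java_files, srcjars) to compile; falls back to the full
    # lists whenever an incremental build would not be safe.
    if changed_paths is None or any(p in changed_paths
                                    for p in classpath_inputs):
        return java_files, srcjars
    return ([p for p in java_files if p in changed_paths],
            [p for p in srcjars if p in changed_paths])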
Example 12
def main(argv):
    argv = build_utils.ExpandFileArgs(argv)
    parser = argparse.ArgumentParser(description=__doc__)
    build_utils.AddDepfileOption(parser)
    parser.add_argument(
        '--android-sdk-tools',
        help='Path to root of SDK providing the manifest merger tool.',
        required=True)
    parser.add_argument(
        '--android-sdk-tools-version-suffix',
        help='Version suffix for SDK providing the manifest merger tool.',
        required=True)
    parser.add_argument('--root-manifest',
                        help='Root manifest to merge into',
                        required=True)
    parser.add_argument('--output', help='Output manifest path', required=True)
    parser.add_argument('--extras',
                        help='GN list of additional manifests to merge')
    parser.add_argument('--min-sdk-version',
                        required=True,
                        help='android:minSdkVersion for merging.')
    parser.add_argument('--target-sdk-version',
                        required=True,
                        help='android:targetSdkVersion for merging.')
    parser.add_argument('--max-sdk-version',
                        help='android:maxSdkVersion for merging.')
    parser.add_argument('--manifest-package',
                        help='Package name of the merged AndroidManifest.xml.')
    args = parser.parse_args(argv)

    classpath = _BuildManifestMergerClasspath(
        args.android_sdk_tools, args.android_sdk_tools_version_suffix)

    with build_utils.AtomicOutput(args.output) as output:
        cmd = [
            build_utils.JAVA_PATH,
            '-cp',
            classpath,
            _MANIFEST_MERGER_MAIN_CLASS,
            '--out',
            output.name,
            '--property',
            'MIN_SDK_VERSION=' + args.min_sdk_version,
            '--property',
            'TARGET_SDK_VERSION=' + args.target_sdk_version,
        ]

        if args.max_sdk_version:
            cmd += [
                '--property',
                'MAX_SDK_VERSION=' + args.max_sdk_version,
            ]

        extras = build_utils.ParseGnList(args.extras)
        if extras:
            cmd += ['--libs', ':'.join(extras)]

        with _ProcessManifest(args.root_manifest, args.min_sdk_version,
                              args.target_sdk_version, args.max_sdk_version,
                              args.manifest_package) as tup:
            root_manifest, package = tup
            cmd += [
                '--main',
                root_manifest,
                '--property',
                'PACKAGE=' + package,
            ]
            build_utils.CheckOutput(
                cmd,
                # https://issuetracker.google.com/issues/63514300:
                # The merger doesn't set a nonzero exit code for failures.
                fail_func=lambda returncode, stderr: returncode != 0 or
                build_utils.IsTimeStale(output.name, [root_manifest] + extras))

        # Check for correct output.
        _, manifest, _ = manifest_utils.ParseManifest(output.name)
        manifest_utils.AssertUsesSdk(manifest, args.min_sdk_version,
                                     args.target_sdk_version)
        manifest_utils.AssertPackage(manifest, package)

    if args.depfile:
        inputs = extras + classpath.split(':')
        build_utils.WriteDepfile(args.depfile,
                                 args.output,
                                 inputs=inputs,
                                 add_pydeps=False)
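Because the manifest merger does not reliably return a nonzero exit code (see the issue tracker link above), the fail_func also treats a stale output as a failure. A stdlib sketch of that check, assuming build_utils.IsTimeStale means "output missing or older than some input" (helper names are illustrative):

import os


def is_time_stale(output, inputs):
    # True when the output is missing or any input is newer than it.
    if not os.path.exists(output):
        return True
    output_mtime = os.path.getmtime(output)
    return any(os.path.getmtime(i) > output_mtime for i in inputs)


def merger_failed(returncode, output, inputs):
    # Mirrors the fail_func above: a nonzero exit code OR a stale output
    # means the merge did not actually succeed.
    return returncode != 0 or is_time_stale(output, inputs)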
Example 13
def _PackageApk(options, build):
  """Compile and link resources with aapt2.

  Args:
    options: The command-line options.
    build: BuildContext object.
  Returns:
    The manifest package name for the APK.
  """
  dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
                                           build.deps_dir)
  path_info = resource_utils.ResourceInfoFile()
  _DuplicateZhResources(dep_subdirs, path_info)
  _RenameLocaleResourceDirs(dep_subdirs, path_info)

  _RemoveUnwantedLocalizedStrings(dep_subdirs, options)

  # Create a function that selects which resource files should be packaged
  # into the final output. Any file that does not pass the predicate will
  # be removed below.
  keep_predicate = _CreateKeepPredicate(options.resource_blacklist_regex,
                                        options.resource_blacklist_exceptions)
  png_paths = []
  for directory in dep_subdirs:
    for f in _IterFiles(directory):
      if not keep_predicate(f):
        os.remove(f)
      elif f.endswith('.png'):
        png_paths.append((f, directory))
  if png_paths and options.png_to_webp:
    _ConvertToWebP(options.webp_binary, png_paths, path_info)
  for directory in dep_subdirs:
    _MoveImagesToNonMdpiFolders(directory, path_info)
    _RemoveImageExtensions(directory, path_info)

  link_command = [
      options.aapt2_path,
      'link',
      '--auto-add-overlay',
      '--no-version-vectors',
      # Set SDK versions in case they are not set in the Android manifest.
      '--min-sdk-version',
      options.min_sdk_version,
      '--target-sdk-version',
      options.target_sdk_version,
  ]

  for j in options.include_resources:
    link_command += ['-I', j]
  if options.version_code:
    link_command += ['--version-code', options.version_code]
  if options.version_name:
    link_command += ['--version-name', options.version_name]
  if options.proguard_file:
    link_command += ['--proguard', build.proguard_path]
    link_command += ['--proguard-minimal-keep-rules']
  if options.proguard_file_main_dex:
    link_command += ['--proguard-main-dex', build.proguard_main_dex_path]
  if options.emit_ids_out:
    link_command += ['--emit-ids', build.emit_ids_path]
  if options.r_text_in:
    shutil.copyfile(options.r_text_in, build.r_txt_path)
  else:
    link_command += ['--output-text-symbols', build.r_txt_path]

  # Note: only one of --proto-format, --shared-lib or --app-as-shared-lib
  #       can be used with recent versions of aapt2.
  if options.shared_resources and not options.proto_path:
    link_command.append('--shared-lib')

  if options.no_xml_namespaces:
    link_command.append('--no-xml-namespaces')

  if options.package_id:
    link_command += [
        '--package-id',
        hex(options.package_id),
        '--allow-reserved-package-id',
    ]

  fixed_manifest, desired_manifest_package_name = _FixManifest(
      options, build.temp_dir)
  if options.rename_manifest_package:
    desired_manifest_package_name = options.rename_manifest_package
  if options.android_manifest_expected:
    _VerifyManifest(fixed_manifest, options.android_manifest_expected,
                    options.android_manifest_normalized,
                    options.android_manifest_expectations_failure_file)

  link_command += [
      '--manifest', fixed_manifest, '--rename-manifest-package',
      desired_manifest_package_name
  ]

  # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
  # Also creates R.txt
  if options.use_resource_ids_path:
    _CreateStableIdsFile(options.use_resource_ids_path, build.stable_ids_path,
                         desired_manifest_package_name)
    link_command += ['--stable-ids', build.stable_ids_path]

  partials = _CompileDeps(options.aapt2_path, dep_subdirs, build.temp_dir)
  for partial in partials:
    link_command += ['-R', partial]

  if options.proto_path:
    link_command += ['--proto-format', '-o', build.proto_path]
  else:
    link_command += ['-o', build.arsc_path]

  link_proc = subprocess.Popen(link_command)

  # Create .res.info file in parallel.
  _CreateResourceInfoFile(path_info, build.info_path,
                          options.dependencies_res_zips)

  exit_code = link_proc.wait()
  if exit_code:
    raise subprocess.CalledProcessError(exit_code, link_command)

  if options.proguard_file and (options.shared_resources
                                or options.app_as_shared_lib):
    # Make sure the R class associated with the manifest package does not have
    # its onResourcesLoaded method obfuscated or removed, so that the framework
    # can call it in the case where the APK is being loaded as a library.
    with open(build.proguard_path, 'a') as proguard_file:
      keep_rule = '''
                  -keep class {package}.R {{
                    public static void onResourcesLoaded(int);
                  }}
                  '''.format(package=desired_manifest_package_name)
      proguard_file.write(textwrap.dedent(keep_rule))

  if options.proto_path and options.arsc_path:
    build_utils.CheckOutput([
        options.aapt2_path, 'convert', '-o', build.arsc_path, build.proto_path
    ])

  if options.optimized_proto_path:
    _OptimizeApk(build.optimized_proto_path, options, build.temp_dir,
                 build.proto_path, build.r_txt_path)
  elif options.optimized_arsc_path:
    _OptimizeApk(build.optimized_arsc_path, options, build.temp_dir,
                 build.arsc_path, build.r_txt_path)

  return desired_manifest_package_name
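One detail above worth isolating: the aapt2 link step is launched with subprocess.Popen so the .res.info file can be written while it runs, and only afterwards is the exit code checked. Reduced to its essentials (names are illustrative):

import subprocess


def run_link_in_parallel(link_command, do_other_work):
    link_proc = subprocess.Popen(link_command)
    do_other_work()  # e.g. write the .res.info file while aapt2 links
    exit_code = link_proc.wait()
    if exit_code:
        raise subprocess.CalledProcessError(exit_code, link_command)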
Example 14
def _RunLint(lint_binary_path,
             backported_methods_path,
             config_path,
             manifest_path,
             extra_manifest_paths,
             sources,
             classpath,
             cache_dir,
             android_sdk_version,
             aars,
             srcjars,
             min_sdk_version,
             resource_sources,
             resource_zips,
             android_sdk_root,
             lint_gen_dir,
             baseline,
             testonly_target=False,
             warnings_as_errors=False):
  logging.info('Lint starting')

  cmd = [
      lint_binary_path,
      '--quiet',  # Silences lint's "." progress updates.
      '--disable',
      ','.join(_DISABLED_ALWAYS),
  ]
  if baseline:
    cmd.extend(['--baseline', baseline])
  if testonly_target:
    cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)])

  if not manifest_path:
    manifest_path = os.path.join(build_utils.DIR_SOURCE_ROOT, 'build',
                                 'android', 'AndroidManifest.xml')

  logging.info('Generating config.xml')
  backported_methods = _RetrieveBackportedMethods(backported_methods_path)
  config_xml_node = _GenerateConfigXmlTree(config_path, backported_methods)
  generated_config_path = os.path.join(lint_gen_dir, 'config.xml')
  _WriteXmlFile(config_xml_node, generated_config_path)
  cmd.extend(['--config', generated_config_path])

  logging.info('Generating Android manifest file')
  android_manifest_tree = _GenerateAndroidManifest(manifest_path,
                                                   extra_manifest_paths,
                                                   min_sdk_version,
                                                   android_sdk_version)
  # Include the rebased manifest_path in the lint generated path so that it is
  # clear in error messages where the original AndroidManifest.xml came from.
  lint_android_manifest_path = os.path.join(lint_gen_dir, manifest_path)
  _WriteXmlFile(android_manifest_tree.getroot(), lint_android_manifest_path)

  resource_root_dir = os.path.join(lint_gen_dir, _RES_ZIP_DIR)
  # These are zip files with generated resources (e.g. strings from GRD).
  logging.info('Extracting resource zips')
  for resource_zip in resource_zips:
    # Use a consistent root and name rather than a temporary file so that
    # suppressions can be local to the lint target and the resource target.
    resource_dir = os.path.join(resource_root_dir, resource_zip)
    shutil.rmtree(resource_dir, True)
    os.makedirs(resource_dir)
    resource_sources.extend(
        build_utils.ExtractAll(resource_zip, path=resource_dir))

  logging.info('Extracting aars')
  aar_root_dir = os.path.join(lint_gen_dir, _AAR_DIR)
  custom_lint_jars = []
  custom_annotation_zips = []
  if aars:
    for aar in aars:
      # Use relative source for aar files since they are not generated.
      aar_dir = os.path.join(aar_root_dir,
                             os.path.splitext(_SrcRelative(aar))[0])
      shutil.rmtree(aar_dir, True)
      os.makedirs(aar_dir)
      aar_files = build_utils.ExtractAll(aar, path=aar_dir)
      for f in aar_files:
        if f.endswith('lint.jar'):
          custom_lint_jars.append(f)
        elif f.endswith('annotations.zip'):
          custom_annotation_zips.append(f)

  logging.info('Extracting srcjars')
  srcjar_root_dir = os.path.join(lint_gen_dir, _SRCJAR_DIR)
  srcjar_sources = []
  if srcjars:
    for srcjar in srcjars:
      # Use path without extensions since otherwise the file name includes
      # .srcjar and lint treats it as a srcjar.
      srcjar_dir = os.path.join(srcjar_root_dir, os.path.splitext(srcjar)[0])
      shutil.rmtree(srcjar_dir, True)
      os.makedirs(srcjar_dir)
      # Sadly lint's srcjar support is broken since it only considers the first
      # srcjar. Until we roll a lint version with that fixed, we need to extract
      # it ourselves.
      srcjar_sources.extend(build_utils.ExtractAll(srcjar, path=srcjar_dir))

  logging.info('Generating project file')
  project_file_root = _GenerateProjectFile(lint_android_manifest_path,
                                           android_sdk_root, cache_dir, sources,
                                           classpath, srcjar_sources,
                                           resource_sources, custom_lint_jars,
                                           custom_annotation_zips,
                                           android_sdk_version)

  project_xml_path = os.path.join(lint_gen_dir, 'project.xml')
  _WriteXmlFile(project_file_root, project_xml_path)
  cmd += ['--project', project_xml_path]

  logging.info('Preparing environment variables')
  env = os.environ.copy()
  # It is important that lint uses the checked-in JDK11 as it is almost 50%
  # faster than JDK8.
  env['JAVA_HOME'] = build_utils.JAVA_HOME
  # This is necessary so that lint errors print stack traces in stdout.
  env['LINT_PRINT_STACKTRACE'] = 'true'
  if baseline and not os.path.exists(baseline):
    # Generating new baselines is only done locally, and requires more memory to
    # avoid OOMs.
    env['LINT_OPTS'] = '-Xmx4g'
  else:
    # The default set in the wrapper script is 1g, but it seems not enough :(
    env['LINT_OPTS'] = '-Xmx2g'

  # This filter is necessary for JDK11.
  stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings
  stdout_filter = lambda x: build_utils.FilterLines(x, 'No issues found')

  start = time.time()
  logging.debug('Lint command %s', ' '.join(cmd))
  failed = True
  try:
    failed = bool(
        build_utils.CheckOutput(cmd,
                                env=env,
                                print_stdout=True,
                                stdout_filter=stdout_filter,
                                stderr_filter=stderr_filter,
                                fail_on_output=warnings_as_errors))
  finally:
    # When not treating warnings as errors, display the extra footer.
    is_debug = os.environ.get('LINT_DEBUG', '0') != '0'

    if failed:
      print('- For more help with lint in Chrome:', _LINT_MD_URL)
      if is_debug:
        print('- DEBUG MODE: Here is the project.xml: {}'.format(
            _SrcRelative(project_xml_path)))
      else:
        print('- Run with LINT_DEBUG=1 to enable lint configuration debugging')

    end = time.time() - start
    logging.info('Lint command took %ss', end)
    if not is_debug:
      shutil.rmtree(aar_root_dir, ignore_errors=True)
      shutil.rmtree(resource_root_dir, ignore_errors=True)
      shutil.rmtree(srcjar_root_dir, ignore_errors=True)
      os.unlink(project_xml_path)

  logging.info('Lint completed')
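
The stdout and stderr filters above rely on build_utils.FilterLines to drop matching lines; a minimal standalone sketch of that assumed behavior (the helper name and exact semantics are assumptions, not the actual build_utils implementation):

import re

def filter_lines(output, filter_string):
    # Drop every line of |output| whose text matches |filter_string| (a regex),
    # e.g. the "No issues found." footer that lint prints on a clean run.
    pattern = re.compile(filter_string)
    return ''.join(line for line in output.splitlines(True)
                   if not pattern.search(line))

print(filter_lines('Scanning MyClass.java\nNo issues found.\n', 'No issues found'))
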
Esempio n. 15
0
def _OptimizeWithR8(options,
                    config_paths,
                    libraries,
                    dynamic_config_data,
                    print_stdout=False):
    with build_utils.TempDir() as tmp_dir:
        if dynamic_config_data:
            dynamic_config_path = os.path.join(tmp_dir, 'dynamic_config.flags')
            with open(dynamic_config_path, 'w') as f:
                f.write(dynamic_config_data)
            config_paths = config_paths + [dynamic_config_path]

        tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
        # If there is no output (no classes are kept), this prevents this script
        # from failing.
        build_utils.Touch(tmp_mapping_path)

        tmp_output = os.path.join(tmp_dir, 'r8out')
        os.mkdir(tmp_output)

        split_contexts_by_name = {}
        if options.feature_names:
            for name, dest_dex, input_jars in zip(options.feature_names,
                                                  options.dex_dests,
                                                  options.feature_jars):
                parent_name = options.uses_split.get(name)
                if parent_name is None and name != 'base':
                    parent_name = 'base'
                split_context = _SplitContext(name,
                                              dest_dex,
                                              input_jars,
                                              tmp_output,
                                              parent_name=parent_name)
                split_contexts_by_name[name] = split_context
        else:
            # Base context will get populated via "extra_jars" below.
            split_contexts_by_name['base'] = _SplitContext(
                'base', options.output_path, [], tmp_output)
        base_context = split_contexts_by_name['base']

        # R8 OOMs with the default xmx=1G.
        cmd = build_utils.JavaCmd(options.warnings_as_errors, xmx='2G') + [
            '-Dcom.android.tools.r8.allowTestProguardOptions=1',
            '-Dcom.android.tools.r8.disableHorizontalClassMerging=1',
        ]
        if options.disable_outlining:
            cmd += ['-Dcom.android.tools.r8.disableOutlining=1']
        if options.dump_inputs:
            cmd += ['-Dcom.android.tools.r8.dumpinputtofile=r8inputs.zip']
        cmd += [
            '-cp',
            options.r8_path,
            'com.android.tools.r8.R8',
            '--no-data-resources',
            '--output',
            base_context.staging_dir,
            '--pg-map-output',
            tmp_mapping_path,
        ]

        if options.disable_checks:
            # Info level priority logs are not printed by default.
            cmd += [
                '--map-diagnostics:CheckDiscardDiagnostic', 'error', 'info'
            ]

        if options.desugar_jdk_libs_json:
            cmd += [
                '--desugared-lib',
                options.desugar_jdk_libs_json,
                '--desugared-lib-pg-conf-output',
                options.desugared_library_keep_rule_output,
            ]

        if options.min_api:
            cmd += ['--min-api', options.min_api]

        if options.force_enable_assertions:
            cmd += ['--force-enable-assertions']

        for lib in libraries:
            cmd += ['--lib', lib]

        for config_file in config_paths:
            cmd += ['--pg-conf', config_file]

        if options.main_dex_rules_path:
            for main_dex_rule in options.main_dex_rules_path:
                cmd += ['--main-dex-rules', main_dex_rule]

        _DeDupeInputJars(split_contexts_by_name)

        # Add any extra inputs to the base context (e.g. desugar runtime).
        extra_jars = set(options.input_paths)
        for split_context in split_contexts_by_name.values():
            extra_jars -= split_context.input_jars
        base_context.input_jars.update(extra_jars)

        for split_context in split_contexts_by_name.values():
            if split_context is base_context:
                continue
            for in_jar in sorted(split_context.input_jars):
                cmd += ['--feature', in_jar, split_context.staging_dir]

        cmd += sorted(base_context.input_jars)

        try:
            stderr_filter = dex.CreateStderrFilter(
                options.show_desugar_default_interface_warnings)
            logging.debug('Running R8')
            build_utils.CheckOutput(cmd,
                                    print_stdout=print_stdout,
                                    stderr_filter=stderr_filter,
                                    fail_on_output=options.warnings_as_errors)
        except build_utils.CalledProcessError:
            # Python will print the original exception as well.
            raise Exception(
                'R8 failed. Please see '
                'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
                'android/docs/java_optimization.md#Debugging-common-failures')

        base_has_imported_lib = False
        if options.desugar_jdk_libs_json:
            logging.debug('Running L8')
            existing_files = build_utils.FindInDirectory(
                base_context.staging_dir)
            jdk_dex_output = os.path.join(
                base_context.staging_dir,
                'classes%d.dex' % (len(existing_files) + 1))
            # Use -applymapping to avoid name collisions.
            l8_dynamic_config_path = os.path.join(tmp_dir,
                                                  'l8_dynamic_config.flags')
            with open(l8_dynamic_config_path, 'w') as f:
                f.write("-applymapping '{}'\n".format(tmp_mapping_path))
            # Pass the dynamic config so that obfuscation options are picked up.
            l8_config_paths = [dynamic_config_path, l8_dynamic_config_path]
            if os.path.exists(options.desugared_library_keep_rule_output):
                l8_config_paths.append(
                    options.desugared_library_keep_rule_output)

            base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
                options.r8_path, options.min_api,
                options.desugar_jdk_libs_json, options.desugar_jdk_libs_jar,
                options.desugar_jdk_libs_configuration_jar, jdk_dex_output,
                options.warnings_as_errors, l8_config_paths)
            if int(options.min_api) >= 24 and base_has_imported_lib:
                with open(jdk_dex_output, 'rb') as f:
                    dexfile = dex_parser.DexFile(bytearray(f.read()))
                    for m in dexfile.IterMethodSignatureParts():
                        print('{}#{}'.format(m[0], m[2]))
                assert False, (
                    'Desugared JDK libs are disabled on Monochrome and newer - see '
                    'crbug.com/1159984 for details, and see above list for desugared '
                    'classes and methods.')

        logging.debug('Collecting outputs')
        base_context.CreateOutput(base_has_imported_lib,
                                  options.desugared_library_keep_rule_output)
        for split_context in split_contexts_by_name.values():
            if split_context is not base_context:
                split_context.CreateOutput()

        with open(options.mapping_output, 'w') as out_file, \
            open(tmp_mapping_path) as in_file:
            # Mapping files generated by R8 include comments that may break
            # some of our tooling so remove those (specifically: apkanalyzer).
            out_file.writelines(l for l in in_file if not l.startswith('#'))
    return base_context
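
build_utils.Touch is used above to guarantee that mapping.txt exists even when R8 keeps no classes. A minimal sketch of the assumed behavior (create the file if missing, refresh its mtime otherwise); not the actual build_utils code:

import os

def touch(path):
    # Create |path| if it does not exist, then bump its modification time, so
    # downstream steps that read the mapping file never fail on a missing file.
    with open(path, 'a'):
        os.utime(path, None)
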
Esempio n. 16
0
def _SplitChildFeatures(options, feature_contexts, base_dex_context, tmp_dir,
                        mapping_path, print_stdout):
    feature_map = {f.name: f for f in feature_contexts}
    parent_to_child = defaultdict(list)
    for child, parent in options.uses_split.items():
        parent_to_child[parent].append(child)
    for parent, children in parent_to_child.items():
        split_output = os.path.join(tmp_dir, 'split_%s' % parent)
        os.mkdir(split_output)
        # DexSplitter is not perfect and can cause issues related to inlining and
        # class merging (see crbug.com/1032609). If strange class loading errors
        # happen in DFMs specifying uses_split, this may be the cause.
        split_cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
            '-cp',
            options.r8_path,
            'com.android.tools.r8.dexsplitter.DexSplitter',
            '--output',
            split_output,
            '--proguard-map',
            mapping_path,
        ]

        parent_jars = set(feature_map[parent].input_paths)
        for base_jar in sorted(parent_jars):
            split_cmd += ['--base-jar', base_jar]

        for child in children:
            for feature_jar in feature_map[child].input_paths:
                if feature_jar not in parent_jars:
                    split_cmd += [
                        '--feature-jar',
                        '%s:%s' % (feature_jar, child)
                    ]

        # The inputs are the outputs for the parent from the original R8 call.
        parent_dir = feature_map[parent].staging_dir
        for file_name in os.listdir(parent_dir):
            split_cmd += ['--input', os.path.join(parent_dir, file_name)]
        logging.debug('Running R8 DexSplitter')
        build_utils.CheckOutput(split_cmd,
                                print_stdout=print_stdout,
                                fail_on_output=options.warnings_as_errors)

        # Copy the parent dex back into the parent's staging dir.
        base_split_output = os.path.join(split_output, 'base')
        shutil.rmtree(parent_dir)
        os.mkdir(parent_dir)
        for dex_file in os.listdir(base_split_output):
            shutil.move(os.path.join(base_split_output, dex_file),
                        os.path.join(parent_dir, dex_file))

        # Copy each child dex back into the child's staging dir.
        for child in children:
            child_split_output = os.path.join(split_output, child)
            child_staging_dir = feature_map[child].staging_dir
            shutil.rmtree(child_staging_dir)
            os.mkdir(child_staging_dir)
            for dex_file in os.listdir(child_split_output):
                shutil.move(os.path.join(child_split_output, dex_file),
                            os.path.join(child_staging_dir, dex_file))

    if not options.disable_checks:
        logging.debug('Verifying dex files')
        _VerifySplitDexFiles(parent_to_child, feature_map, base_dex_context,
                             options)
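
To make the grouping above concrete, here is a tiny worked example with hypothetical feature names showing how options.uses_split is inverted into the parent_to_child map that drives each DexSplitter invocation:

from collections import defaultdict

uses_split = {'child_a': 'parent', 'child_b': 'parent'}  # hypothetical values

parent_to_child = defaultdict(list)
for child, parent in uses_split.items():
    parent_to_child[parent].append(child)

# One DexSplitter run is made per parent, with all of its children passed as
# --feature-jar targets.
assert sorted(parent_to_child['parent']) == ['child_a', 'child_b']
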
Esempio n. 17
0
def _CheckForMissingSymbols(r8_path, dex_files, classpath, warnings_as_errors,
                            error_title):
    cmd = build_utils.JavaCmd(warnings_as_errors) + [
        '-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences',
        '--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning',
        '--check'
    ]

    for path in classpath:
        cmd += ['--lib', path]
    for path in dex_files:
        cmd += ['--source', path]

    def stderr_filter(stderr):
        ignored_lines = [
            # Summary contains warning count, which our filtering makes wrong.
            'Warning: Tracereferences found',

            # TODO(agrieve): Create interface jars for these missing classes rather
            #     than allowlisting here.
            'dalvik.system',
            'libcore.io',
            'sun.misc.Unsafe',

            # Found in: com/facebook/fbui/textlayoutbuilder/StaticLayoutHelper
            'android.text.StaticLayout.<init>',

            # Explicitly guarded by try (NoClassDefFoundError) in Flogger's
            # PlatformProvider.
            'com.google.common.flogger.backend.google.GooglePlatform',
            'com.google.common.flogger.backend.system.DefaultPlatform',

            # trichrome_webview_google_bundle contains this missing reference.
            # TODO(crbug.com/1142530): Fix this missing reference properly.
            'org.chromium.build.NativeLibraries',

            # TODO(agrieve): Exclude these only when use_jacoco_coverage=true.
            'java.lang.instrument.ClassFileTransformer',
            'java.lang.instrument.IllegalClassFormatException',
            'java.lang.instrument.Instrumentation',
            'java.lang.management.ManagementFactory',
            'javax.management.MBeanServer',
            'javax.management.ObjectInstance',
            'javax.management.ObjectName',
            'javax.management.StandardMBean',

            # Explicitly guarded by try (NoClassDefFoundError) in Firebase's
            # KotlinDetector: com.google.firebase.platforminfo.KotlinDetector.
            'kotlin.KotlinVersion',
        ]

        had_unfiltered_items = '  ' in stderr
        stderr = build_utils.FilterLines(
            stderr, '|'.join(re.escape(x) for x in ignored_lines))
        if stderr:
            if '  ' in stderr:
                stderr = error_title + """
Tip: Build with:
        is_java_debug=false
        treat_warnings_as_errors=false
        enable_proguard_obfuscation=false
     and then use dexdump to see which class(es) reference them.

     E.g.:
       third_party/android_sdk/public/build-tools/*/dexdump -d \
out/Release/apks/YourApk.apk > dex.txt
""" + stderr

                if 'FragmentActivity' in stderr:
                    stderr += """
You may need to update build configs to run FragmentActivityReplacer for
additional targets. See
https://chromium.googlesource.com/chromium/src.git/+/main/docs/ui/android/bytecode_rewriting.md.
"""
            elif had_unfiltered_items:
                # Left only with empty headings. All indented items filtered out.
                stderr = ''
        return stderr

    logging.debug('cmd: %s', ' '.join(cmd))
    build_utils.CheckOutput(cmd,
                            print_stdout=True,
                            stderr_filter=stderr_filter,
                            fail_on_output=warnings_as_errors)
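
The allowlist entries above are plain substrings, so each is escaped before being OR'd into a single pattern; a short sketch of that step in isolation:

import re

ignored_lines = ['sun.misc.Unsafe', 'libcore.io']  # abbreviated allowlist
pattern = '|'.join(re.escape(x) for x in ignored_lines)
line = 'Missing class sun.misc.Unsafe (referenced from: ...)'
assert re.search(pattern, line) is not None
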
Esempio n. 18
0
def _RunLint(lint_path, config_path, processed_config_path, manifest_path,
             result_path, product_dir, src_dirs, classes_dir):
    def _RelativizePath(path):
        """Returns relative path to top-level src dir.

    Args:
      path: A path relative to cwd.
    """
        return os.path.relpath(os.path.abspath(path), _SRC_ROOT)

    def _ProcessConfigFile():
        if not build_utils.IsTimeStale(processed_config_path, [config_path]):
            return

        with open(config_path, 'rb') as f:
            content = f.read().replace('PRODUCT_DIR',
                                       _RelativizePath(product_dir))

        with open(processed_config_path, 'wb') as f:
            f.write(content)

    def _ProcessResultFile():
        with open(result_path, 'rb') as f:
            content = f.read().replace(_RelativizePath(product_dir),
                                       'PRODUCT_DIR')

        with open(result_path, 'wb') as f:
            f.write(content)

    def _ParseAndShowResultFile():
        dom = minidom.parse(result_path)
        issues = dom.getElementsByTagName('issue')
        print >> sys.stderr
        for issue in issues:
            issue_id = issue.attributes['id'].value
            severity = issue.attributes['severity'].value
            message = issue.attributes['message'].value
            location_elem = issue.getElementsByTagName('location')[0]
            path = location_elem.attributes['file'].value
            line = location_elem.getAttribute('line')
            if line:
                error = '%s:%s %s: %s [%s]' % (path, line, severity, message,
                                               issue_id)
            else:
                # Issues in class files don't have a line number.
                error = '%s %s: %s [%s]' % (path, severity, message, issue_id)
            print >> sys.stderr, error
            for attr in ['errorLine1', 'errorLine2']:
                error_line = issue.getAttribute(attr)
                if error_line:
                    print >> sys.stderr, error_line
        return len(issues)

    _ProcessConfigFile()

    cmd = [
        lint_path,
        '-Werror',
        '--exitcode',
        '--showall',
        '--config',
        _RelativizePath(processed_config_path),
        '--classpath',
        _RelativizePath(classes_dir),
        '--xml',
        _RelativizePath(result_path),
    ]
    for src in src_dirs:
        cmd.extend(['--sources', _RelativizePath(src)])
    cmd.append(_RelativizePath(os.path.join(manifest_path, os.pardir)))

    if os.path.exists(result_path):
        os.remove(result_path)

    try:
        build_utils.CheckOutput(cmd, cwd=_SRC_ROOT)
    except build_utils.CalledProcessError:
        # There is a problem with lint usage
        if not os.path.exists(result_path):
            raise
        # There are actual lint issues
        else:
            num_issues = _ParseAndShowResultFile()
            _ProcessResultFile()
            msg = ('\nLint found %d new issues.\n'
                   ' - For full explanation refer to %s\n'
                   ' - Wanna suppress these issues?\n'
                   '    1. Read comment in %s\n'
                   '    2. Run "python %s %s"\n' %
                   (num_issues, _RelativizePath(result_path),
                    _RelativizePath(config_path),
                    _RelativizePath(
                        os.path.join(
                            _SRC_ROOT, 'build', 'android', 'lint',
                            'suppress.py')), _RelativizePath(result_path)))
            print >> sys.stderr, msg
            # Lint errors do not fail the build.
            return 0

    return 0
Esempio n. 19
0
def StripLibrary(android_strip, android_strip_args, library_path, output_path):
    if build_utils.IsTimeStale(output_path, [library_path]):
        strip_cmd = ([android_strip] + android_strip_args +
                     ['-o', output_path, library_path])
        build_utils.CheckOutput(strip_cmd)
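
Both this example and the previous one guard their work with build_utils.IsTimeStale. A minimal sketch of the assumed semantics (stale when the output is missing or older than any input); an approximation, not the actual build_utils code:

import os

def is_time_stale(output, inputs):
    # True when |output| does not exist yet or any file in |inputs| is newer.
    if not os.path.exists(output):
        return True
    output_mtime = os.path.getmtime(output)
    return any(os.path.getmtime(path) > output_mtime for path in inputs)
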
Esempio n. 20
0
def _PackageApk(options, dep_subdirs, temp_dir, gen_dir, r_txt_path):
  """Compile resources with aapt2 and generate intermediate .ap_ file.

  Args:
    options: The command-line options tuple. E.g. the generated apk
      will be written to |options.apk_path|.
    dep_subdirs: The list of directories where dependency resource zips
      were extracted (its content will be altered by this function).
    temp_dir: A temporary directory.
    gen_dir: Another temp directory where some intermediate files are
      generated.
    r_txt_path: The path where the R.txt file will be written to.
  """
  renamed_paths = dict()
  renamed_paths.update(_DuplicateZhResources(dep_subdirs))
  renamed_paths.update(_RenameLocaleResourceDirs(dep_subdirs))

  _RemoveUnwantedLocalizedStrings(dep_subdirs, options)

  # Create a function that selects which resource files should be packaged
  # into the final output. Any file that does not pass the predicate will
  # be removed below.
  keep_predicate = _CreateKeepPredicate(dep_subdirs,
                                        options.resource_blacklist_regex,
                                        options.resource_blacklist_exceptions)
  png_paths = []
  for directory in dep_subdirs:
    for f in _IterFiles(directory):
      if not keep_predicate(f):
        os.remove(f)
      elif f.endswith('.png'):
        png_paths.append((f, directory))
  if png_paths and options.png_to_webp:
    renamed_paths.update(_ConvertToWebP(options.webp_binary, png_paths))
  for directory in dep_subdirs:
    renamed_paths.update(_MoveImagesToNonMdpiFolders(directory))

  if options.optimize_resources:
    if options.unoptimized_resources_path:
      unoptimized_apk_path = options.unoptimized_resources_path
    else:
      unoptimized_apk_path = os.path.join(gen_dir, 'intermediate.ap_')
  else:
    unoptimized_apk_path = options.apk_path
  link_command = _CreateLinkApkArgs(options)
  # TODO(digit): Is this below actually required for R.txt generation?
  link_command += ['--java', gen_dir]

  fixed_manifest = _FixManifest(options, temp_dir)
  link_command += ['--manifest', fixed_manifest]

  partials = _CompileDeps(options.aapt2_path, dep_subdirs, temp_dir)
  for partial in partials:
    link_command += ['-R', partial]

  # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
  # Also creates R.txt
  with build_utils.AtomicOutput(unoptimized_apk_path) as unoptimized, \
      build_utils.AtomicOutput(r_txt_path) as r_txt:
    link_command += ['-o', unoptimized.name]
    link_command += ['--output-text-symbols', r_txt.name]
    build_utils.CheckOutput(
        link_command, print_stdout=False, print_stderr=False)

    if options.optimize_resources:
      with build_utils.AtomicOutput(options.apk_path) as optimized:
        _OptimizeApk(optimized.name, options, temp_dir, unoptimized.name,
                     r_txt.name)

  _CreateResourceInfoFile(
      renamed_paths, options.apk_info_path, options.dependencies_res_zips)
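
A rough sketch of the kind of predicate _CreateKeepPredicate is assumed to build above: every file is kept unless its path matches the blacklist regex, with explicitly listed exceptions always kept (the helper name and exact semantics here are assumptions, not the real implementation):

import os
import re

def create_keep_predicate(blacklist_regex, blacklist_exceptions):
    # Keep everything when no blacklist is configured.
    if not blacklist_regex:
        return lambda path: True
    pattern = re.compile(blacklist_regex)
    exceptions = set(blacklist_exceptions or [])
    # Keep files explicitly excepted by basename, drop anything matching the
    # blacklist, and keep the rest.
    return lambda path: (os.path.basename(path) in exceptions
                         or not pattern.search(path))
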
Esempio n. 21
0
def _OnStaleMd5(options):
    aapt = options.aapt_path
    with build_utils.TempDir() as temp_dir:
        deps_dir = os.path.join(temp_dir, 'deps')
        build_utils.MakeDirectory(deps_dir)
        v14_dir = os.path.join(temp_dir, 'v14')
        build_utils.MakeDirectory(v14_dir)

        gen_dir = os.path.join(temp_dir, 'gen')
        build_utils.MakeDirectory(gen_dir)
        r_txt_path = os.path.join(gen_dir, 'R.txt')
        srcjar_dir = os.path.join(temp_dir, 'java')

        input_resource_dirs = options.resource_dirs

        if not options.v14_skip:
            for resource_dir in input_resource_dirs:
                generate_v14_compatible_resources.GenerateV14Resources(
                    resource_dir, v14_dir)

        dep_zips = options.dependencies_res_zips
        dep_subdirs = []
        for z in dep_zips:
            subdir = os.path.join(deps_dir, os.path.basename(z))
            if os.path.exists(subdir):
                raise Exception('Resource zip name conflict: ' +
                                os.path.basename(z))
            build_utils.ExtractAll(z, path=subdir)
            dep_subdirs.append(subdir)

        # Generate R.java. This R.java contains non-final constants and is used only
        # while compiling the library jar (e.g. chromium_content.jar). When building
        # an apk, a new R.java file with the correct resource -> ID mappings will be
        # generated by merging the resources from all libraries and the main apk
        # project.
        package_command = [
            aapt,
            'package',
            '-m',
            '-M',
            options.android_manifest,
            '--auto-add-overlay',
            '--no-version-vectors',
            '-I',
            options.android_sdk_jar,
            '--output-text-symbols',
            gen_dir,
            '-J',
            gen_dir,  # Required for R.txt generation.
            '--ignore-assets',
            build_utils.AAPT_IGNORE_PATTERN
        ]

        # aapt supports only the "--include-all-resources" mode, where each R.java
        # file ends up with all symbols, rather than only those that it had at the
        # time it was originally generated. This subtle difference makes no
        # difference when compiling, but can lead to increased unused symbols in the
        # resulting R.class files.
        # TODO(agrieve): See if proguard makes this difference actually translate
        # into a size difference. If not, we can delete all of our custom R.java
        # template code above (and make include_all_resources the default).
        if options.include_all_resources:
            srcjar_dir = gen_dir
            if options.extra_res_packages:
                colon_separated = ':'.join(options.extra_res_packages)
                package_command += ['--extra-packages', colon_separated]
            if options.non_constant_id:
                package_command.append('--non-constant-id')
            if options.custom_package:
                package_command += ['--custom-package', options.custom_package]
            if options.shared_resources:
                package_command.append('--shared-lib')
            if options.app_as_shared_lib:
                package_command.append('--app-as-shared-lib')

        for d in input_resource_dirs:
            package_command += ['-S', d]

        # Adding all dependencies as sources is necessary for @type/foo references
        # to symbols within dependencies to resolve. However, it has the side-effect
        # that all Java symbols from dependencies are copied into the new R.java.
        # E.g.: It enables an arguably incorrect usage of
        # "mypackage.R.id.lib_symbol" where "libpackage.R.id.lib_symbol" would be
        # more correct. This is just how Android works.
        for d in dep_subdirs:
            package_command += ['-S', d]

        if options.proguard_file:
            package_command += ['-G', options.proguard_file]
        if options.proguard_file_main_dex:
            package_command += ['-D', options.proguard_file_main_dex]
        build_utils.CheckOutput(package_command, print_stderr=False)

        # When an empty res/ directory is passed, aapt does not write an R.txt.
        if not os.path.exists(r_txt_path):
            build_utils.Touch(r_txt_path)

        if not options.include_all_resources:
            # --include-all-resources can only be specified for generating final R
            # classes for an APK. It makes no sense for an APK to have a pre-generated
            # R.txt, because the aapt-generated one already lists all available
            # resources.
            if options.r_text_in:
                r_txt_path = options.r_text_in

            packages = list(options.extra_res_packages)
            r_txt_files = list(options.extra_r_text_files)

            cur_package = options.custom_package
            if not options.custom_package:
                cur_package = _ExtractPackageFromManifest(
                    options.android_manifest)

            # Don't create a .java file for the current resource target when:
            # - no package name was provided (either by manifest or build rules),
            # - there was already a dependent android_resources() with the same
            #   package (occurs mostly when an apk target and resources target share
            #   an AndroidManifest.xml)
            if cur_package != 'org.dummy' and cur_package not in packages:
                packages.append(cur_package)
                r_txt_files.append(r_txt_path)

            if packages:
                shared_resources = options.shared_resources or options.app_as_shared_lib
                CreateRJavaFiles(srcjar_dir, r_txt_path, packages, r_txt_files,
                                 shared_resources)

        # This is the list of directories with resources to put in the final .zip
        # file. The order of these is important so that crunched/v14 resources
        # override the normal ones.
        zip_resource_dirs = input_resource_dirs + [v14_dir]

        base_crunch_dir = os.path.join(temp_dir, 'crunch')

        # Crunch image resources. This shrinks png files and is necessary for
        # 9-patch images to display correctly. 'aapt crunch' accepts only a single
        # directory at a time and deletes everything in the output directory.
        for idx, input_dir in enumerate(input_resource_dirs):
            crunch_dir = os.path.join(base_crunch_dir, str(idx))
            build_utils.MakeDirectory(crunch_dir)
            zip_resource_dirs.append(crunch_dir)
            CrunchDirectory(aapt, input_dir, crunch_dir)

        ZipResources(zip_resource_dirs, options.resource_zip_out)

        if options.all_resources_zip_out:
            CombineZips([options.resource_zip_out] + dep_zips,
                        options.all_resources_zip_out)

        if options.R_dir:
            build_utils.DeleteDirectory(options.R_dir)
            shutil.copytree(srcjar_dir, options.R_dir)
        else:
            build_utils.ZipDir(options.srcjar_out, srcjar_dir)

        if options.r_text_out:
            shutil.copyfile(r_txt_path, options.r_text_out)
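
_ExtractPackageFromManifest is called above but not shown; a minimal sketch of the assumed behavior, reading the package attribute from the manifest's root element:

from xml.etree import ElementTree

def extract_package_from_manifest(manifest_path):
    # AndroidManifest.xml declares the package on its root <manifest> element.
    return ElementTree.parse(manifest_path).getroot().get('package')
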
Esempio n. 22
0
def _OptimizeWithR8(options,
                    config_paths,
                    libraries,
                    dynamic_config_data,
                    print_stdout=False):
  with build_utils.TempDir() as tmp_dir:
    if dynamic_config_data:
      tmp_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
      with open(tmp_config_path, 'w') as f:
        f.write(dynamic_config_data)
      config_paths = config_paths + [tmp_config_path]

    tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
    # If there is no output (no classes are kept), this prevents this script
    # from failing.
    build_utils.Touch(tmp_mapping_path)

    tmp_output = os.path.join(tmp_dir, 'r8out')
    os.mkdir(tmp_output)

    feature_contexts = []
    if options.feature_names:
      for name, dest_dex, input_paths in zip(
          options.feature_names, options.dex_dests, options.feature_jars):
        feature_context = _DexPathContext(name, dest_dex, input_paths,
                                          tmp_output)
        if name == 'base':
          base_dex_context = feature_context
        else:
          feature_contexts.append(feature_context)
    else:
      base_dex_context = _DexPathContext('base', options.output_path,
                                         options.input_paths, tmp_output)

    cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
        '-Dcom.android.tools.r8.allowTestProguardOptions=1',
    ]
    if options.disable_outlining:
      cmd += ['-Dcom.android.tools.r8.disableOutlining=1']
    cmd += [
        '-cp',
        options.r8_path,
        'com.android.tools.r8.R8',
        '--no-data-resources',
        '--output',
        base_dex_context.staging_dir,
        '--pg-map-output',
        tmp_mapping_path,
    ]

    if options.desugar_jdk_libs_json:
      cmd += [
          '--desugared-lib',
          options.desugar_jdk_libs_json,
          '--desugared-lib-pg-conf-output',
          options.desugared_library_keep_rule_output,
      ]

    if options.min_api:
      cmd += ['--min-api', options.min_api]

    if options.force_enable_assertions:
      cmd += ['--force-enable-assertions']

    for lib in libraries:
      cmd += ['--lib', lib]

    for config_file in config_paths:
      cmd += ['--pg-conf', config_file]

    if options.main_dex_rules_path:
      for main_dex_rule in options.main_dex_rules_path:
        cmd += ['--main-dex-rules', main_dex_rule]

    module_input_jars = set(base_dex_context.input_paths)
    for feature in feature_contexts:
      feature_input_jars = [
          p for p in feature.input_paths if p not in module_input_jars
      ]
      module_input_jars.update(feature_input_jars)
      for in_jar in feature_input_jars:
        cmd += ['--feature', in_jar, feature.staging_dir]

    cmd += base_dex_context.input_paths
    # Add any extra input jars to the base module (e.g. desugar runtime).
    extra_jars = set(options.input_paths) - module_input_jars
    cmd += sorted(extra_jars)

    try:
      stderr_filter = dex.CreateStderrFilter(
          options.show_desugar_default_interface_warnings)
      build_utils.CheckOutput(cmd,
                              print_stdout=print_stdout,
                              stderr_filter=stderr_filter,
                              fail_on_output=options.warnings_as_errors)
    except build_utils.CalledProcessError as err:
      debugging_link = ('\n\nR8 failed. Please see {}.'.format(
          'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
          'android/docs/java_optimization.md#Debugging-common-failures\n'))
      raise build_utils.CalledProcessError(err.cwd, err.args,
                                           err.output + debugging_link)

    base_has_imported_lib = False
    if options.desugar_jdk_libs_json:
      existing_files = build_utils.FindInDirectory(base_dex_context.staging_dir)
      jdk_dex_output = os.path.join(base_dex_context.staging_dir,
                                    'classes%d.dex' % (len(existing_files) + 1))
      base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
          options.r8_path, options.min_api, options.desugar_jdk_libs_json,
          options.desugar_jdk_libs_jar,
          options.desugar_jdk_libs_configuration_jar,
          options.desugared_library_keep_rule_output, jdk_dex_output,
          options.warnings_as_errors)

    base_dex_context.CreateOutput(base_has_imported_lib,
                                  options.desugared_library_keep_rule_output)
    for feature in feature_contexts:
      feature.CreateOutput()

    with open(options.mapping_output, 'w') as out_file, \
        open(tmp_mapping_path) as in_file:
      # Mapping files generated by R8 include comments that may break
      # some of our tooling so remove those (specifically: apkanalyzer).
      out_file.writelines(l for l in in_file if not l.startswith('#'))
Esempio n. 23
0
def _PackageApk(options, build):
  """Compile and link resources with aapt2.

  Args:
    options: The command-line options.
    build: BuildContext object.
  Returns:
    The manifest package name for the APK.
  """
  logging.debug('Extracting resource .zips')
  dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
                                           build.deps_dir)
  logging.debug('Applying locale transformations')
  path_info = resource_utils.ResourceInfoFile()
  if options.support_zh_hk:
    _DuplicateZhResources(dep_subdirs, path_info)
  _RenameLocaleResourceDirs(dep_subdirs, path_info)

  logging.debug('Applying file-based exclusions')
  keep_predicate = _CreateKeepPredicate(options.resource_exclusion_regex,
                                        options.resource_exclusion_exceptions)
  png_paths = _FilterResourceFiles(dep_subdirs, keep_predicate)

  if options.locale_allowlist or options.shared_resources_allowlist_locales:
    logging.debug('Applying locale-based string exclusions')
    _RemoveUnwantedLocalizedStrings(dep_subdirs, options)

  if png_paths and options.png_to_webp:
    logging.debug('Converting png->webp')
    _ConvertToWebP(options.webp_binary, png_paths, path_info,
                   options.webp_cache_dir)
  logging.debug('Applying drawable transformations')
  for directory in dep_subdirs:
    _MoveImagesToNonMdpiFolders(directory, path_info)
    _RemoveImageExtensions(directory, path_info)

  logging.debug('Running aapt2 compile')
  exclusion_rules = [x.split(':', 1) for x in options.values_filter_rules]
  partials = _CompileDeps(options.aapt2_path, dep_subdirs, build.temp_dir,
                          exclusion_rules)

  link_command = [
      options.aapt2_path,
      'link',
      '--auto-add-overlay',
      '--no-version-vectors',
      # Set SDK versions in case they are not set in the Android manifest.
      '--min-sdk-version',
      options.min_sdk_version,
      '--target-sdk-version',
      options.target_sdk_version,
  ]

  for j in options.include_resources:
    link_command += ['-I', j]
  if options.version_code:
    link_command += ['--version-code', options.version_code]
  if options.version_name:
    link_command += ['--version-name', options.version_name]
  if options.proguard_file:
    link_command += ['--proguard', build.proguard_path]
    link_command += ['--proguard-minimal-keep-rules']
  if options.proguard_file_main_dex:
    link_command += ['--proguard-main-dex', build.proguard_main_dex_path]
  if options.emit_ids_out:
    link_command += ['--emit-ids', build.emit_ids_path]
  if options.r_text_in:
    shutil.copyfile(options.r_text_in, build.r_txt_path)
  else:
    link_command += ['--output-text-symbols', build.r_txt_path]

  # Note: only one of --proto-format, --shared-lib or --app-as-shared-lib
  #       can be used with recent versions of aapt2.
  if options.shared_resources:
    link_command.append('--shared-lib')

  if options.no_xml_namespaces:
    link_command.append('--no-xml-namespaces')

  if options.package_id:
    link_command += [
        '--package-id',
        hex(options.package_id),
        '--allow-reserved-package-id',
    ]

  fixed_manifest, desired_manifest_package_name = _FixManifest(
      options, build.temp_dir)
  if options.rename_manifest_package:
    desired_manifest_package_name = options.rename_manifest_package

  link_command += [
      '--manifest', fixed_manifest, '--rename-manifest-package',
      desired_manifest_package_name
  ]

  # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
  # Also creates R.txt
  if options.use_resource_ids_path:
    _CreateStableIdsFile(options.use_resource_ids_path, build.stable_ids_path,
                         desired_manifest_package_name)
    link_command += ['--stable-ids', build.stable_ids_path]

  for partial in partials:
    link_command += ['-R', partial]

  # We always create a binary arsc file first, then convert to proto, so flags
  # such as --shared-lib can be supported.
  arsc_path = build.arsc_path
  if arsc_path is None:
    _, arsc_path = tempfile.mkstemp()
  link_command += ['-o', arsc_path]

  logging.debug('Starting: aapt2 link')
  link_proc = subprocess.Popen(link_command)

  # Create .res.info file in parallel.
  _CreateResourceInfoFile(path_info, build.info_path,
                          options.dependencies_res_zips)
  logging.debug('Created .res.info file')

  exit_code = link_proc.wait()
  logging.debug('Finished: aapt2 link')
  if exit_code:
    raise subprocess.CalledProcessError(exit_code, link_command)

  if options.proguard_file and (options.shared_resources
                                or options.app_as_shared_lib):
    # Make sure the R class associated with the manifest package does not have
    # its onResourcesLoaded method obfuscated or removed, so that the framework
    # can call it in the case where the APK is being loaded as a library.
    with open(build.proguard_path, 'a') as proguard_file:
      keep_rule = '''
                  -keep class {package}.R {{
                    public static void onResourcesLoaded(int);
                  }}
                  '''.format(package=desired_manifest_package_name)
      proguard_file.write(textwrap.dedent(keep_rule))

  logging.debug('Running aapt2 convert')
  build_utils.CheckOutput([
      options.aapt2_path, 'convert', '--output-format', 'proto', '-o',
      build.proto_path, arsc_path
  ])

  # Workaround for b/147674078. This is only needed for WebLayer and does not
  # affect WebView usage, since WebView does not use dynamic attributes.
  if options.shared_resources:
    logging.debug('Hardcoding dynamic attributes')
    protoresources.HardcodeSharedLibraryDynamicAttributes(
        build.proto_path, options.is_bundle_module,
        options.shared_resources_allowlist)

    build_utils.CheckOutput([
        options.aapt2_path, 'convert', '--output-format', 'binary', '-o',
        arsc_path, build.proto_path
    ])

  if build.arsc_path is None:
    os.remove(arsc_path)

  if options.optimized_proto_path:
    _OptimizeApk(build.optimized_proto_path, options, build.temp_dir,
                 build.proto_path, build.r_txt_path)
  elif options.optimized_arsc_path:
    _OptimizeApk(build.optimized_arsc_path, options, build.temp_dir,
                 build.arsc_path, build.r_txt_path)

  return desired_manifest_package_name
Esempio n. 24
0
def main(args):
    args = build_utils.ExpandFileArgs(args)
    options = _ParseOptions(args)

    proguard = proguard_util.ProguardCmdBuilder(options.proguard_path)
    proguard.injars(options.input_paths)
    proguard.configs(options.proguard_configs)
    proguard.config_exclusions(options.proguard_config_exclusions)
    proguard.outjar(options.output_path)
    proguard.mapping_output(options.mapping_output)

    # If a jar is part of input no need to include it as library jar.
    classpath = [
        p for p in set(options.classpath) if p not in options.input_paths
    ]
    proguard.libraryjars(classpath)
    proguard.verbose(options.verbose)
    if not options.enable_dangerous_optimizations:
        proguard.disable_optimizations(_DANGEROUS_OPTIMIZATIONS)

    # TODO(agrieve): Remove proguard usages.
    if options.r8_path:
        with tempfile.NamedTemporaryFile() as mapping_temp:
            if options.output_path.endswith('.dex'):
                with build_utils.TempDir() as tmp_dex_dir:
                    cmd = _CreateR8Command(options, mapping_temp.name,
                                           tmp_dex_dir)
                    build_utils.CheckOutput(cmd)
                    _MoveTempDexFile(tmp_dex_dir, options.output_path)
            else:
                cmd = _CreateR8Command(options, mapping_temp.name,
                                       options.output_path)
                build_utils.CheckOutput(cmd)

            # Copy the mapping file back to where it should be.
            map_path = options.mapping_output
            with build_utils.AtomicOutput(map_path) as mapping:
                # Mapping files generated by R8 include comments that may break
                # some of our tooling so remove those.
                mapping_temp.seek(0)
                mapping.writelines(l for l in mapping_temp
                                   if not l.startswith("#"))

        build_utils.WriteDepfile(options.depfile,
                                 options.output_path,
                                 inputs=proguard.GetDepfileDeps(),
                                 add_pydeps=False)
    else:
        # Do not consider the temp file as an input since its name is random.
        input_paths = proguard.GetInputs()

        with tempfile.NamedTemporaryFile() as f:
            if options.mapping:
                input_paths.append(options.mapping)
                # Maintain only class name mappings in the .mapping file in order to
                # work around what appears to be a ProGuard bug in -applymapping:
                #     method 'int close()' is not being kept as 'a', but remapped to 'c'
                _RemoveMethodMappings(options.mapping, f)
                proguard.mapping(f.name)

            input_strings = proguard.build()
            if f.name in input_strings:
                input_strings[input_strings.index(f.name)] = '$M'

            build_utils.CallAndWriteDepfileIfStale(
                proguard.CheckOutput,
                options,
                input_paths=input_paths,
                input_strings=input_strings,
                output_paths=proguard.GetOutputs(),
                depfile_deps=proguard.GetDepfileDeps(),
                add_pydeps=False)
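
_RemoveMethodMappings is called above but not defined here. In ProGuard mapping files, class mappings are un-indented 'Original -> obfuscated:' lines and member mappings are indented beneath them, so a sketch of the assumed filtering looks like this:

def remove_method_mappings(mapping_path, out_file):
    # Copy only the un-indented class-name lines; drop the indented method and
    # field mappings that trip up -applymapping.
    with open(mapping_path) as in_file:
        for line in in_file:
            if line and not line[0].isspace():
                out_file.write(line)
    out_file.flush()
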
Esempio n. 25
0
def main(argv):
  build_utils.InitLogging('TURBINE_DEBUG')
  argv = build_utils.ExpandFileArgs(argv[1:])
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--target-name', help='Fully qualified GN target name.')
  parser.add_argument(
      '--turbine-jar-path', required=True, help='Path to the turbine jar file.')
  parser.add_argument(
      '--java-srcjars',
      action='append',
      default=[],
      help='List of srcjars to include in compilation.')
  parser.add_argument(
      '--bootclasspath',
      action='append',
      default=[],
      help='Boot classpath for javac. If this is specified multiple times, '
      'they will all be appended to construct the classpath.')
  parser.add_argument(
      '--java-version',
      help='Java language version to use in -source and -target args to javac.')
  parser.add_argument('--classpath', action='append', help='Classpath to use.')
  parser.add_argument(
      '--processors',
      action='append',
      help='GN list of annotation processor main classes.')
  parser.add_argument(
      '--processorpath',
      action='append',
      help='GN list of jars that comprise the classpath used for Annotation '
      'Processors.')
  parser.add_argument(
      '--processor-args',
      action='append',
      help='key=value arguments for the annotation processors.')
  parser.add_argument('--jar-path', help='Jar output path.', required=True)
  parser.add_argument(
      '--generated-jar-path',
      required=True,
      help='Output path for generated source files.')
  parser.add_argument('--warnings-as-errors',
                      action='store_true',
                      help='Treat all warnings as errors.')
  options, unknown_args = parser.parse_known_args(argv)

  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
  options.classpath = build_utils.ParseGnList(options.classpath)
  options.processorpath = build_utils.ParseGnList(options.processorpath)
  options.processors = build_utils.ParseGnList(options.processors)
  options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)

  files = []
  for arg in unknown_args:
    # Interpret a path prefixed with @ as a file containing a list of sources.
    if arg.startswith('@'):
      files.extend(build_utils.ReadSourcesList(arg[1:]))

  cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
      '-classpath', options.turbine_jar_path, 'com.google.turbine.main.Main'
  ]
  javac_cmd = []

  # Turbine reads lists from command line args by consuming args until one
  # starts with a double dash (--). Thus the arguments for each list must be
  # grouped together and passed as one block.
  if options.processors:
    cmd += ['--processors']
    cmd += options.processors

  if options.java_version:
    javac_cmd.extend([
        '-source',
        options.java_version,
        '-target',
        options.java_version,
    ])
  if options.java_version == '1.8':
    # Android's boot jar doesn't contain all java 8 classes.
    options.bootclasspath.append(build_utils.RT_JAR_PATH)

  if options.bootclasspath:
    cmd += ['--bootclasspath']
    for bootclasspath in options.bootclasspath:
      cmd += bootclasspath.split(':')

  if options.processorpath:
    cmd += ['--processorpath']
    cmd += options.processorpath

  if options.processor_args:
    for arg in options.processor_args:
      javac_cmd.extend(['-A%s' % arg])

  if options.classpath:
    cmd += ['--classpath']
    cmd += options.classpath

  if options.java_srcjars:
    cmd += ['--source_jars']
    cmd += options.java_srcjars

  if files:
    # Use jar_path to ensure paths are relative (needed for goma).
    files_rsp_path = options.jar_path + '.files_list.txt'
    with open(files_rsp_path, 'w') as f:
      f.write(' '.join(files))
    # Pass source paths as response files to avoid extremely long command lines
    # that are tedious to debug.
    cmd += ['--sources']
    cmd += ['@' + files_rsp_path]

  if javac_cmd:
    cmd.append('--javacopts')
    cmd += javac_cmd
    cmd.append('--')  # Terminate javacopts

  # Use AtomicOutput so that output timestamps are not updated when outputs
  # are not changed.
  with build_utils.AtomicOutput(options.jar_path) as output_jar, \
      build_utils.AtomicOutput(options.generated_jar_path) as generated_jar:
    cmd += ['--output', output_jar.name, '--gensrc_output', generated_jar.name]

    process_javac_output_partial = functools.partial(
        ProcessJavacOutput, target_name=options.target_name)

    logging.debug('Command: %s', cmd)
    start = time.time()
    build_utils.CheckOutput(cmd,
                            print_stdout=True,
                            stdout_filter=process_javac_output_partial,
                            stderr_filter=process_javac_output_partial,
                            fail_on_output=options.warnings_as_errors)
    end = time.time() - start
    logging.info('Header compilation took %ss', end)

  if options.depfile:
    # GN already knows of the java files, so avoid listing individual java files
    # in the depfile.
    depfile_deps = (options.bootclasspath + options.classpath +
                    options.processorpath + options.java_srcjars)
    build_utils.WriteDepfile(options.depfile, options.jar_path, depfile_deps)
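
As noted above, sources are handed to turbine through a response file rather than on the command line; a tiny sketch of that handoff with hypothetical paths:

import tempfile

sources = ['gen/Foo.java', 'gen/Bar.java']  # hypothetical source list
with tempfile.NamedTemporaryFile('w', suffix='.files_list.txt',
                                 delete=False) as rsp:
    rsp.write(' '.join(sources))
# Both this script's '@' handling and turbine expand the file back into a list.
cmd_tail = ['--sources', '@' + rsp.name]
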
Esempio n. 26
0
def _OnStaleMd5(options, javac_cmd, java_files, classpath):
    logging.info('Starting _OnStaleMd5')

    # Compiles with Error Prone take twice as long to run as pure javac. Thus GN
    # rules run both in parallel, with Error Prone only used for checks.
    save_outputs = not options.enable_errorprone

    with build_utils.TempDir() as temp_dir:
        srcjars = options.java_srcjars

        classes_dir = os.path.join(temp_dir, 'classes')
        os.makedirs(classes_dir)

        if save_outputs:
            generated_java_dir = options.generated_dir
        else:
            generated_java_dir = os.path.join(temp_dir, 'gen')

        shutil.rmtree(generated_java_dir, True)

        srcjar_files = {}
        if srcjars:
            logging.info('Extracting srcjars to %s', generated_java_dir)
            build_utils.MakeDirectory(generated_java_dir)
            jar_srcs = []
            for srcjar in options.java_srcjars:
                extracted_files = build_utils.ExtractAll(
                    srcjar,
                    no_clobber=True,
                    path=generated_java_dir,
                    pattern='*.java')
                for path in extracted_files:
                    # We want the path inside the srcjar so the viewer can have a tree
                    # structure.
                    srcjar_files[path] = '{}/{}'.format(
                        srcjar, os.path.relpath(path, generated_java_dir))
                jar_srcs.extend(extracted_files)
            logging.info('Done extracting srcjars')
            java_files.extend(jar_srcs)

        if java_files:
            # Don't include the output directory in the initial set of args since it
            # being in a temp dir makes it unstable (breaks md5 stamping).
            cmd = javac_cmd + ['-d', classes_dir]

            # Pass classpath and source paths as response files to avoid extremely
            # long command lines that are tedious to debug.
            if classpath:
                cmd += ['-classpath', ':'.join(classpath)]

            java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
            with open(java_files_rsp_path, 'w') as f:
                f.write(' '.join(java_files))
            cmd += ['@' + java_files_rsp_path]

            logging.debug('Build command %s', cmd)
            build_utils.CheckOutput(cmd,
                                    print_stdout=options.chromium_code,
                                    stderr_filter=ProcessJavacOutput)
            logging.info('Finished build command')

        if save_outputs:
            # Creating the jar file takes the longest, start it first on a separate
            # process to unblock the rest of the post-processing steps.
            jar_file_worker = multiprocessing.Process(
                target=_CreateJarFile,
                args=(options.jar_path, options.provider_configurations,
                      options.additional_jar_files, classes_dir))
            jar_file_worker.start()
        else:
            jar_file_worker = None
            build_utils.Touch(options.jar_path)

        if save_outputs:
            _CreateInfoFile(java_files, options.jar_path,
                            options.chromium_code, srcjar_files, classes_dir,
                            generated_java_dir, options.jar_info_exclude_globs)
        else:
            build_utils.Touch(options.jar_path + '.info')

        if jar_file_worker:
            jar_file_worker.join()
        logging.info('Completed all steps in _OnStaleMd5')
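
The jar write above is overlapped with the remaining post-processing via a worker process; the pattern in isolation, with a hypothetical stand-in for _CreateJarFile:

import multiprocessing

def create_jar(jar_path):
    # Hypothetical stand-in for the slow jar-writing step.
    pass

if __name__ == '__main__':
    worker = multiprocessing.Process(target=create_jar, args=('example.jar',))
    worker.start()
    # ... info-file creation and other post-processing would run here ...
    worker.join()
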
Esempio n. 27
0
def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs,
                classpath):
    # Don't bother enabling incremental compilation for non-chromium code.
    incremental = options.incremental and options.chromium_code

    with build_utils.TempDir() as temp_dir:
        srcjars = options.java_srcjars

        classes_dir = os.path.join(temp_dir, 'classes')
        os.makedirs(classes_dir)

        changed_paths = None
        # jmake can handle deleted files, but it's a rare case and it would
        # complicate this script's logic.
        if incremental and changes.AddedOrModifiedOnly():
            changed_paths = set(changes.IterChangedPaths())
            # Do a full compile if classpath has changed.
            # jmake doesn't seem to do this on its own... Might be that ijars mess up
            # its change-detection logic.
            if any(p in changed_paths for p in classpath_inputs):
                changed_paths = None

        if options.incremental:
            pdb_path = options.jar_path + '.pdb'

        if incremental:
            # jmake is a compiler wrapper that figures out the minimal set of .java
            # files that need to be rebuilt given a set of .java files that have
            # changed.
            # jmake determines what files are stale based on timestamps between .java
            # and .class files. Since we use .jars, .srcjars, and md5 checks,
            # timestamp info isn't accurate for this purpose. Rather than use jmake's
            # programmatic interface (like we eventually should), we ensure that all
            # .class files are newer than their .java files, and convey to jmake which
            # sources are stale by having their .class files be missing entirely
            # (by not extracting them).
            javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)
            if srcjars:
                _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir)

        srcjar_files = dict()
        if srcjars:
            java_dir = os.path.join(temp_dir, 'java')
            os.makedirs(java_dir)
            for srcjar in options.java_srcjars:
                if changed_paths:
                    changed_paths.update(
                        os.path.join(java_dir, f)
                        for f in changes.IterChangedSubpaths(srcjar))
                extracted_files = build_utils.ExtractAll(srcjar,
                                                         path=java_dir,
                                                         pattern='*.java')
                for path in extracted_files:
                    srcjar_files[path] = srcjar
            jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
            java_files.extend(jar_srcs)
            if changed_paths:
                # Set the mtime of all sources to 0 since we use the absence of .class
                # files to tell jmake which files are stale.
                for path in jar_srcs:
                    os.utime(path, (0, 0))

        _CreateInfoFile(java_files, options, srcjar_files)

        if java_files:
            if changed_paths:
                changed_java_files = [
                    p for p in java_files if p in changed_paths
                ]
                if os.path.exists(options.jar_path):
                    _ExtractClassFiles(options.jar_path, classes_dir,
                                       changed_java_files)
                # Add the extracted files to the classpath. This is required because
                # when compiling only a subset of files, classes that haven't changed
                # need to be findable.
                classpath.append(classes_dir)

            # Can happen when a target goes from having no sources, to having sources.
            # It's created by the call to build_utils.Touch() below.
            if incremental:
                if os.path.exists(pdb_path) and not os.path.getsize(pdb_path):
                    os.unlink(pdb_path)

            # Don't include the output directory in the initial set of args since it
            # being in a temp dir makes it unstable (breaks md5 stamping).
            cmd = javac_cmd + ['-d', classes_dir]

            # Pass classpath and source paths as response files to avoid extremely
            # long command lines that are tedious to debug.
            if classpath:
                cmd += ['-classpath', ':'.join(classpath)]

            java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
            with open(java_files_rsp_path, 'w') as f:
                f.write(' '.join(java_files))
            cmd += ['@' + java_files_rsp_path]

            # JMake prints out some diagnostic logs that we want to ignore.
            # This assumes that all compiler output goes through stderr.
            stdout_filter = lambda s: ''
            if md5_check.PRINT_EXPLANATIONS:
                stdout_filter = None

            attempt_build = lambda: build_utils.CheckOutput(
                cmd,
                print_stdout=options.chromium_code,
                stdout_filter=stdout_filter,
                stderr_filter=ProcessJavacOutput)
            try:
                attempt_build()
            except build_utils.CalledProcessError as e:
                # Work-around for a bug in jmake (http://crbug.com/551449).
                if 'project database corrupted' not in e.output:
                    raise
                print(
                    'Applying work-around for jmake project database corrupted '
                    '(http://crbug.com/551449).')
                os.unlink(pdb_path)
                attempt_build()

        if options.incremental and (not java_files or not incremental):
            # Make sure output exists.
            build_utils.Touch(pdb_path)

        with build_utils.AtomicOutput(options.jar_path) as f:
            jar.JarDirectory(
                classes_dir,
                f.name,
                provider_configurations=options.provider_configurations,
                additional_files=options.additional_jar_files)
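
The _ExtractClassFiles helper used above is defined earlier in the script and is not part of this excerpt. As a rough sketch of the technique the comments describe (restoring only the .class files of unchanged sources, so that jmake treats the missing ones as stale), something along these lines would work; the helper name and matching rule are illustrative, not the exact implementation:

import os
import zipfile


def _ExtractUnchangedClassFiles(jar_path, dest_dir, changed_java_files):
    """Extracts .class files from the previous jar, skipping changed sources.

    jmake treats a missing .class file as a stale source, so only the classes
    whose corresponding .java file did NOT change are restored.
    """
    # Base names of the changed sources, e.g. {'Foo', 'Bar'}.
    changed = {
        os.path.splitext(os.path.basename(p))[0] for p in changed_java_files
    }
    with zipfile.ZipFile(jar_path) as z:
        for info in z.infolist():
            if not info.filename.endswith('.class'):
                continue
            # 'pkg/Foo$Inner.class' -> 'Foo'; inner classes follow their outer class.
            base = os.path.basename(info.filename)[:-len('.class')].split('$')[0]
            if base not in changed:
                z.extract(info, dest_dir)
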
Example no. 28
0
def _RunCompiler(options,
                 javac_cmd,
                 java_files,
                 classpath,
                 jar_path,
                 save_outputs=True):
    logging.info('Starting _RunCompiler')

    # Compiling with Error Prone takes twice as long as pure javac, so GN rules
    # run both in parallel, with Error Prone used only for checks.
    save_outputs = not options.enable_errorprone

    # Use jar_path's directory to ensure paths are relative (needed for goma).
    temp_dir = jar_path + '.staging'
    shutil.rmtree(temp_dir, True)
    os.makedirs(temp_dir)
    try:
        classes_dir = os.path.join(temp_dir, 'classes')
        service_provider_configuration = os.path.join(
            temp_dir, 'service_provider_configuration')

        if save_outputs:
            input_srcjars_dir = os.path.join(options.generated_dir,
                                             'input_srcjars')
            annotation_processor_outputs_dir = os.path.join(
                options.generated_dir, 'annotation_processor_outputs')
            # Delete any stale files in the generated directory. The purpose of
            # options.generated_dir is for codesearch.
            shutil.rmtree(options.generated_dir, True)
            info_file_context = _InfoFileContext(
                options.chromium_code, options.jar_info_exclude_globs)
        else:
            input_srcjars_dir = os.path.join(temp_dir, 'input_srcjars')
            annotation_processor_outputs_dir = os.path.join(
                temp_dir, 'annotation_processor_outputs')

        if options.java_srcjars:
            logging.info('Extracting srcjars to %s', input_srcjars_dir)
            build_utils.MakeDirectory(input_srcjars_dir)
            for srcjar in options.java_srcjars:
                extracted_files = build_utils.ExtractAll(
                    srcjar,
                    no_clobber=True,
                    path=input_srcjars_dir,
                    pattern='*.java')
                java_files.extend(extracted_files)
                if save_outputs:
                    info_file_context.AddSrcJarSources(srcjar, extracted_files,
                                                       input_srcjars_dir)
            logging.info('Done extracting srcjars')

        if options.header_jar:
            logging.info('Extracting service provider configs')
            # Extract META-INF/services/* so that it can be copied into the output
            # .jar
            build_utils.ExtractAll(options.header_jar,
                                   no_clobber=True,
                                   path=service_provider_configuration,
                                   pattern='META-INF/services/*')
            logging.info('Done extracting service provider configs')

        if save_outputs and java_files:
            info_file_context.SubmitFiles(java_files)

        if java_files:
            # Don't include the output directory in the initial set of args, since its
            # location in a temp dir makes the command unstable (breaks md5 stamping).
            cmd = list(javac_cmd)
            os.makedirs(classes_dir)
            cmd += ['-d', classes_dir]

            if options.processors:
                os.makedirs(annotation_processor_outputs_dir)
                cmd += ['-s', annotation_processor_outputs_dir]

            if classpath:
                cmd += ['-classpath', ':'.join(classpath)]

            # Pass source paths as response files to avoid extremely long command
            # lines that are tedious to debug.
            java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
            with open(java_files_rsp_path, 'w') as f:
                f.write(' '.join(java_files))
            cmd += ['@' + java_files_rsp_path]

            logging.debug('Build command %s', cmd)
            start = time.time()
            build_utils.CheckOutput(cmd,
                                    print_stdout=options.chromium_code,
                                    stdout_filter=ProcessJavacOutput,
                                    stderr_filter=ProcessJavacOutput,
                                    fail_on_output=options.warnings_as_errors)
            end = time.time() - start
            logging.info('Java compilation took %ss', end)

        if save_outputs:
            if options.processors:
                annotation_processor_java_files = build_utils.FindInDirectory(
                    annotation_processor_outputs_dir)
                if annotation_processor_java_files:
                    info_file_context.SubmitFiles(
                        annotation_processor_java_files)

            _CreateJarFile(jar_path, service_provider_configuration,
                           options.additional_jar_files, classes_dir)

            info_file_context.Commit(jar_path + '.info')
        else:
            build_utils.Touch(jar_path)

        logging.info('Completed all steps in _RunCompiler')
    finally:
        shutil.rmtree(temp_dir)
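
_CreateJarFile is likewise defined elsewhere in the script. Assuming it only has to combine the compiled classes with the extracted META-INF/services provider configs (the real helper also handles options.additional_jar_files and deterministic jar output), a minimal sketch of the idea could be:

import os
import zipfile


def _CreateJarFileSketch(jar_path, service_provider_configuration_dir, classes_dir):
    """Zips compiled classes plus ServiceLoader provider configs into one jar."""
    with zipfile.ZipFile(jar_path, 'w', zipfile.ZIP_DEFLATED) as jar:
        for src_dir in (classes_dir, service_provider_configuration_dir):
            if not os.path.isdir(src_dir):
                continue
            for root, _, files in os.walk(src_dir):
                for name in files:
                    full_path = os.path.join(root, name)
                    # Archive paths are relative to the source dir so the jar
                    # layout matches the package structure / META-INF tree.
                    arcname = os.path.relpath(full_path, src_dir)
                    jar.write(full_path, arcname)
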
Example no. 29
0
def Compile():
    build_utils.CheckOutput(
        javac_cmd,
        print_stdout=chromium_code,
        stderr_filter=ColorJavacOutput)
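
ColorJavacOutput is referenced here but not shown in this excerpt. A minimal sketch of such a stderr filter, assuming it receives javac's stderr as a single string and returns the text to print, could wrap warning and error lines in ANSI colors (the function name and regexes below are illustrative):

import re

# Hypothetical patterns; javac emits lines like 'Foo.java:42: warning: ...'.
_WARNING_RE = re.compile(r':\d+: warning:')
_ERROR_RE = re.compile(r':\d+: error:')


def ColorJavacOutputSketch(output):
    """Wraps javac warning/error lines in ANSI color codes for readability."""
    def Colorize(line):
        if _ERROR_RE.search(line):
            return '\033[31m' + line + '\033[0m'  # red
        if _WARNING_RE.search(line):
            return '\033[33m' + line + '\033[0m'  # yellow
        return line

    return '\n'.join(Colorize(line) for line in output.split('\n'))
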
Example no. 30
0
def main():
    options = ParseArgs()
    android_jar = os.path.join(options.android_sdk, 'android.jar')
    aapt = options.aapt_path

    with build_utils.TempDir() as temp_dir:
        package_command = [
            aapt,
            'package',
            '--version-code',
            options.version_code,
            '--version-name',
            options.version_name,
            '-M',
            options.android_manifest,
            '--no-crunch',
            '-f',
            '--auto-add-overlay',
            '-I',
            android_jar,
            '-F',
            options.apk_path,
            '--ignore-assets',
            build_utils.AAPT_IGNORE_PATTERN,
        ]

        if options.no_compress:
            for ext in options.no_compress.split(','):
                package_command += ['-0', ext]
        if options.shared_resources:
            package_command.append('--shared-lib')

        if options.asset_dir and os.path.exists(options.asset_dir):
            package_command += ['-A', options.asset_dir]

        if options.resource_zips:
            dep_zips = build_utils.ParseGypList(options.resource_zips)
            for z in dep_zips:
                subdir = os.path.join(temp_dir, os.path.basename(z))
                if os.path.exists(subdir):
                    raise Exception('Resource zip name conflict: ' +
                                    os.path.basename(z))
                build_utils.ExtractAll(z, path=subdir)
                package_command += PackageArgsForExtractedZip(subdir)

        if options.create_density_splits:
            for config in DENSITY_SPLITS.itervalues():
                package_command.extend(('--split', ','.join(config)))

        language_splits = None
        if options.language_splits:
            language_splits = build_utils.ParseGypList(options.language_splits)
            for lang in language_splits:
                package_command.extend(('--split', lang))

        if 'Debug' in options.configuration_name:
            package_command += ['--debug-mode']

        build_utils.CheckOutput(package_command,
                                print_stdout=False,
                                print_stderr=False)

        if options.create_density_splits or language_splits:
            CheckForMissedConfigs(options.apk_path,
                                  options.create_density_splits,
                                  language_splits)

        if options.create_density_splits:
            RenameDensitySplits(options.apk_path)

        if options.depfile:
            build_utils.WriteDepfile(options.depfile,
                                     build_utils.GetPythonDependencies())
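
DENSITY_SPLITS is defined near the top of the script and is not included in this excerpt. A hedged sketch of such a mapping, and of how the --split arguments above get assembled from it (the exact density qualifiers in the real table may differ), could look like:

# Hypothetical density table; the real DENSITY_SPLITS may group qualifiers
# differently.
DENSITY_SPLITS = {
    'hdpi': ('hdpi',),
    'xhdpi': ('xhdpi',),
    'xxhdpi': ('xxhdpi',),
    'xxxhdpi': ('xxxhdpi',),
}


def AddDensitySplitArgs(package_command):
    """Appends one aapt --split flag per density configuration."""
    for config in DENSITY_SPLITS.values():
        package_command.extend(('--split', ','.join(config)))
    return package_command
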