Code example #1
0
def _OptimizeWithR8(options,
                    config_paths,
                    libraries,
                    dynamic_config_data,
                    print_stdout=False):
  """Runs the R8 optimizer over the inputs and writes the optimized output.

  Builds an R8 command line from the given configs/libraries, runs it in a
  temp dir, then moves the result to options.output_path (as a .jar unless
  the output path ends in .dex) and writes a comment-stripped mapping file.

  Args:
    options: Parsed options (r8_path, input_paths, output_path,
      mapping_output, desugar, min_api, force_enable_assertions,
      main_dex_rules_path, disable_outlining).
    config_paths: ProGuard config files passed via --pg-conf.
    libraries: Library jars passed via --lib.
    dynamic_config_data: Optional string of extra ProGuard rules; written to
      a temp file and appended to config_paths.
    print_stdout: Whether to echo R8's stdout.

  Raises:
    ProguardProcessError: If the R8 invocation fails.
  """
  with build_utils.TempDir() as tmp_dir:
    if dynamic_config_data:
      tmp_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
      with open(tmp_config_path, 'w') as f:
        f.write(dynamic_config_data)
      # Rebind (rather than append) so the caller's list is not mutated.
      config_paths = config_paths + [tmp_config_path]

    tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
    # If there is no output (no classes are kept), this prevents this script
    # from failing.
    build_utils.Touch(tmp_mapping_path)

    tmp_output = os.path.join(tmp_dir, 'r8out')
    os.mkdir(tmp_output)

    cmd = [
        build_utils.JAVA_PATH,
        '-jar',
        options.r8_path,
        '--no-data-resources',
        '--output',
        tmp_output,
        '--pg-map-output',
        tmp_mapping_path,
    ]

    if not options.desugar:
      cmd += ['--no-desugaring']

    for lib in libraries:
      cmd += ['--lib', lib]

    for config_file in config_paths:
      cmd += ['--pg-conf', config_file]

    if options.min_api:
      cmd += ['--min-api', options.min_api]

    if options.force_enable_assertions:
      cmd += ['--force-enable-assertions']

    if options.main_dex_rules_path:
      for main_dex_rule in options.main_dex_rules_path:
        cmd += ['--main-dex-rules', main_dex_rule]

    cmd += options.input_paths

    env = os.environ.copy()
    # Java echoes "Picked up _JAVA_OPTIONS: ..." to stderr; drop those lines.
    stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)
    env['_JAVA_OPTIONS'] = '-Dcom.android.tools.r8.allowTestProguardOptions=1'
    if options.disable_outlining:
      env['_JAVA_OPTIONS'] += ' -Dcom.android.tools.r8.disableOutlining=1'

    try:
      build_utils.CheckOutput(
          cmd, env=env, print_stdout=print_stdout, stderr_filter=stderr_filter)
    except build_utils.CalledProcessError as err:
      debugging_link = ('R8 failed. Please see {}.'.format(
          'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
          'android/docs/java_optimization.md#Debugging-common-failures\n'))
      raise ProguardProcessError(err, debugging_link)

    found_files = build_utils.FindInDirectory(tmp_output)
    if not options.output_path.endswith('.dex'):
      # Add to .jar using Python rather than having R8 output to a .zip directly
      # in order to disable compression of the .jar, saving ~500ms.
      tmp_jar_output = tmp_output + '.jar'
      build_utils.DoZip(found_files, tmp_jar_output, base_dir=tmp_output)
      shutil.move(tmp_jar_output, options.output_path)
    else:
      # A bare .dex output must correspond to exactly one produced file.
      if len(found_files) > 1:
        raise Exception('Too many files created: {}'.format(found_files))
      shutil.move(found_files[0], options.output_path)

    with open(options.mapping_output, 'w') as out_file, \
        open(tmp_mapping_path) as in_file:
      # Mapping files generated by R8 include comments that may break
      # some of our tooling so remove those (specifically: apkanalyzer).
      out_file.writelines(l for l in in_file if not l.startswith('#'))
Code example #2
0
def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs,
                classpath):
  """Compiles java_files (plus .srcjar contents) into options.jar_path.

  Supports incremental compilation via jmake: when only sources changed,
  previously-built .class files are extracted from the old jar so jmake
  rebuilds just the stale subset.

  Args:
    changes: md5_check change info describing what differs from last build.
    options: Parsed command-line options.
    javac_cmd: Base javac command line (list of args).
    java_files: .java source paths; extended in place with files extracted
      from srcjars.
    classpath_inputs: Paths whose modification forces a full recompile.
    classpath: Classpath entries; may be extended in place.
  """
  # Don't bother enabling incremental compilation for non-chromium code.
  incremental = options.incremental and options.chromium_code

  with build_utils.TempDir() as temp_dir:
    srcjars = options.java_srcjars

    classes_dir = os.path.join(temp_dir, 'classes')
    os.makedirs(classes_dir)

    changed_paths = None
    # jmake can handle deleted files, but it's a rare case and it would
    # complicate this script's logic.
    if incremental and changes.AddedOrModifiedOnly():
      changed_paths = set(changes.IterChangedPaths())
      # Do a full compile if classpath has changed.
      # jmake doesn't seem to do this on its own... Might be that ijars mess up
      # its change-detection logic.
      if any(p in changed_paths for p in classpath_inputs):
        changed_paths = None

    # pdb_path is only defined when options.incremental; every later use is
    # guarded by a condition that implies options.incremental.
    if options.incremental:
      pdb_path = options.jar_path + '.pdb'

    if incremental:
      # jmake is a compiler wrapper that figures out the minimal set of .java
      # files that need to be rebuilt given a set of .java files that have
      # changed.
      # jmake determines what files are stale based on timestamps between .java
      # and .class files. Since we use .jars, .srcjars, and md5 checks,
      # timestamp info isn't accurate for this purpose. Rather than use jmake's
      # programatic interface (like we eventually should), we ensure that all
      # .class files are newer than their .java files, and convey to jmake which
      # sources are stale by having their .class files be missing entirely
      # (by not extracting them).
      javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)
      if srcjars:
        _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir)

    # Maps extracted .java path -> originating .srcjar (for the info file).
    srcjar_files = dict()
    if srcjars:
      java_dir = os.path.join(temp_dir, 'java')
      os.makedirs(java_dir)
      for srcjar in options.java_srcjars:
        if changed_paths:
          changed_paths.update(os.path.join(java_dir, f)
                               for f in changes.IterChangedSubpaths(srcjar))
        extracted_files = build_utils.ExtractAll(
            srcjar, path=java_dir, pattern='*.java')
        for path in extracted_files:
          srcjar_files[path] = srcjar
      jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
      java_files.extend(jar_srcs)
      if changed_paths:
        # Set the mtime of all sources to 0 since we use the absence of .class
        # files to tell jmake which files are stale.
        for path in jar_srcs:
          os.utime(path, (0, 0))

    _CreateInfoFile(java_files, options, srcjar_files)

    if java_files:
      if changed_paths:
        changed_java_files = [p for p in java_files if p in changed_paths]
        if os.path.exists(options.jar_path):
          _ExtractClassFiles(options.jar_path, classes_dir, changed_java_files)
        # Add the extracted files to the classpath. This is required because
        # when compiling only a subset of files, classes that haven't changed
        # need to be findable.
        classpath.append(classes_dir)

      # Can happen when a target goes from having no sources, to having sources.
      # It's created by the call to build_utils.Touch() below.
      if incremental:
        if os.path.exists(pdb_path) and not os.path.getsize(pdb_path):
          os.unlink(pdb_path)

      # Don't include the output directory in the initial set of args since it
      # being in a temp dir makes it unstable (breaks md5 stamping).
      cmd = javac_cmd + ['-d', classes_dir]

      # Pass classpath and source paths as response files to avoid extremely
      # long command lines that are tedius to debug.
      if classpath:
        cmd += ['-classpath', ':'.join(classpath)]

      java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
      with open(java_files_rsp_path, 'w') as f:
        f.write(' '.join(java_files))
      cmd += ['@' + java_files_rsp_path]

      # JMake prints out some diagnostic logs that we want to ignore.
      # This assumes that all compiler output goes through stderr.
      stdout_filter = lambda s: ''
      if md5_check.PRINT_EXPLANATIONS:
        stdout_filter = None

      attempt_build = lambda: build_utils.CheckOutput(
          cmd,
          print_stdout=options.chromium_code,
          stdout_filter=stdout_filter,
          stderr_filter=ProcessJavacOutput)
      try:
        attempt_build()
      except build_utils.CalledProcessError as e:
        # Work-around for a bug in jmake (http://crbug.com/551449).
        if ('project database corrupted' not in e.output
            and 'jmake: internal Java exception' not in e.output):
          raise
        print ('Applying work-around for jmake project database corrupted '
               '(http://crbug.com/551449).')
        os.unlink(pdb_path)
        attempt_build()

    if options.incremental and (not java_files or not incremental):
      # Make sure output exists.
      build_utils.Touch(pdb_path)

    with build_utils.AtomicOutput(options.jar_path) as f:
      jar.JarDirectory(classes_dir,
                       f.name,
                       provider_configurations=options.provider_configurations,
                       additional_files=options.additional_jar_files)
Code example #3
0
def _OptimizeWithR8(options,
                    config_paths,
                    libraries,
                    dynamic_config_data,
                    print_stdout=False):
    """Runs R8 over the inputs, with optional feature-split (DFM) outputs.

    Each feature module gets its own staging dir via _DexPathContext; the
    'base' module receives any leftover/extra input jars. After R8 runs,
    desugared JDK library dex may be appended to the base output, and a
    comment-stripped mapping file is written.

    Args:
        options: Parsed options (r8_path, input_paths, output_path,
            feature_names/dex_dests/feature_jars, desugar_jdk_libs_*,
            min_api, force_enable_assertions, main_dex_rules_path,
            disable_outlining, mapping_output).
        config_paths: ProGuard config files passed via --pg-conf.
        libraries: Library jars passed via --lib.
        dynamic_config_data: Optional string of extra ProGuard rules.
        print_stdout: Whether to echo R8's stdout.

    Raises:
        ProguardProcessError: If the R8 invocation fails.
    """
    with build_utils.TempDir() as tmp_dir:
        if dynamic_config_data:
            tmp_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
            with open(tmp_config_path, 'w') as f:
                f.write(dynamic_config_data)
            # Rebind (rather than append) so the caller's list is not mutated.
            config_paths = config_paths + [tmp_config_path]

        tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
        # If there is no output (no classes are kept), this prevents this script
        # from failing.
        build_utils.Touch(tmp_mapping_path)

        tmp_output = os.path.join(tmp_dir, 'r8out')
        os.mkdir(tmp_output)

        feature_contexts = []
        if options.feature_names:
            # NOTE(review): assumes options.feature_names always includes
            # 'base'; otherwise base_dex_context is unbound below — confirm
            # against callers.
            for name, dest_dex, input_paths in zip(options.feature_names,
                                                   options.dex_dests,
                                                   options.feature_jars):
                feature_context = _DexPathContext(name, dest_dex, input_paths,
                                                  tmp_output)
                if name == 'base':
                    base_dex_context = feature_context
                else:
                    feature_contexts.append(feature_context)
        else:
            base_dex_context = _DexPathContext('base', options.output_path,
                                               options.input_paths, tmp_output)

        cmd = [
            build_utils.JAVA_PATH,
            '-cp',
            options.r8_path,
            'com.android.tools.r8.R8',
            '--no-data-resources',
            '--output',
            base_dex_context.staging_dir,
            '--pg-map-output',
            tmp_mapping_path,
        ]

        if options.desugar_jdk_libs_json:
            cmd += [
                '--desugared-lib',
                options.desugar_jdk_libs_json,
                '--desugared-lib-pg-conf-output',
                options.desugared_library_keep_rule_output,
            ]

        if options.min_api:
            cmd += ['--min-api', options.min_api]

        if options.force_enable_assertions:
            cmd += ['--force-enable-assertions']

        for lib in libraries:
            cmd += ['--lib', lib]

        for config_file in config_paths:
            cmd += ['--pg-conf', config_file]

        if options.main_dex_rules_path:
            for main_dex_rule in options.main_dex_rules_path:
                cmd += ['--main-dex-rules', main_dex_rule]

        # Each input jar is assigned to exactly one module; a jar already
        # claimed by an earlier feature is not repeated.
        module_input_jars = set(base_dex_context.input_paths)
        for feature in feature_contexts:
            feature_input_jars = [
                p for p in feature.input_paths if p not in module_input_jars
            ]
            module_input_jars.update(feature_input_jars)
            for in_jar in feature_input_jars:
                cmd += ['--feature', in_jar, feature.staging_dir]

        cmd += base_dex_context.input_paths
        # Add any extra input jars to the base module (e.g. desugar runtime).
        extra_jars = set(options.input_paths) - module_input_jars
        cmd += sorted(extra_jars)

        env = os.environ.copy()
        # Java echoes "Picked up _JAVA_OPTIONS: ..." to stderr; drop it.
        stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)
        env['_JAVA_OPTIONS'] = '-Dcom.android.tools.r8.allowTestProguardOptions=1'
        if options.disable_outlining:
            env['_JAVA_OPTIONS'] += ' -Dcom.android.tools.r8.disableOutlining=1'

        try:
            build_utils.CheckOutput(cmd,
                                    env=env,
                                    print_stdout=print_stdout,
                                    stderr_filter=stderr_filter)
        except build_utils.CalledProcessError as err:
            debugging_link = ('R8 failed. Please see {}.'.format(
                'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
                'android/docs/java_optimization.md#Debugging-common-failures\n'
            ))
            raise ProguardProcessError(err, debugging_link)

        base_has_imported_lib = False
        if options.desugar_jdk_libs_json:
            # Append the desugared JDK library dex after the existing
            # classesN.dex files in the base module's staging dir.
            existing_files = build_utils.FindInDirectory(
                base_dex_context.staging_dir)
            base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
                options.r8_path, options.min_api,
                options.desugar_jdk_libs_json, options.desugar_jdk_libs_jar,
                options.desugared_library_keep_rule_output,
                os.path.join(base_dex_context.staging_dir,
                             'classes%d.dex' % (len(existing_files) + 1)))

        base_dex_context.CreateOutput(
            base_has_imported_lib, options.desugared_library_keep_rule_output)
        for feature in feature_contexts:
            feature.CreateOutput()

        with open(options.mapping_output, 'w') as out_file, \
            open(tmp_mapping_path) as in_file:
            # Mapping files generated by R8 include comments that may break
            # some of our tooling so remove those (specifically: apkanalyzer).
            out_file.writelines(l for l in in_file if not l.startswith('#'))
Code example #4
0
def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs):
    """Compiles java_files (plus .srcjar contents) into main and excluded jars.

    Older javac.py variant: output is split by jar_excluded_classes into
    options.jar_path and a sibling .excluded.jar. Supports incremental
    compilation via jmake, mirroring the newer variant of this function.

    Args:
        changes: md5_check change info describing what differs from last build.
        options: Parsed command-line options.
        javac_cmd: Base javac command line (list of args); must already
            contain '-classpath' (indexed below for incremental builds).
        java_files: .java source paths; extended in place with srcjar files.
        classpath_inputs: Paths whose modification forces a full recompile.
    """
    incremental = options.incremental
    # Don't bother enabling incremental compilation for third_party code, since
    # _CheckPathMatchesClassName() fails on some of it, and it's not really much
    # benefit.
    for java_file in java_files:
        if 'third_party' in java_file:
            incremental = False
        else:
            _CheckPathMatchesClassName(java_file)

    with build_utils.TempDir() as temp_dir:
        srcjars = options.java_srcjars
        # The .excluded.jar contains .class files excluded from the main jar.
        # It is used for incremental compiles.
        excluded_jar_path = options.jar_path.replace('.jar', '.excluded.jar')

        classes_dir = os.path.join(temp_dir, 'classes')
        os.makedirs(classes_dir)

        changed_paths = None
        # jmake can handle deleted files, but it's a rare case and it would
        # complicate this script's logic.
        if incremental and changes.AddedOrModifiedOnly():
            changed_paths = set(changes.IterChangedPaths())
            # Do a full compile if classpath has changed.
            # jmake doesn't seem to do this on its own... Might be that ijars mess up
            # its change-detection logic.
            if any(p in changed_paths for p in classpath_inputs):
                changed_paths = None

        # pdb_path is only defined when options.incremental; later uses are
        # guarded by conditions that imply options.incremental.
        if options.incremental:
            pdb_path = options.jar_path + '.pdb'

        if incremental:
            # jmake is a compiler wrapper that figures out the minimal set of .java
            # files that need to be rebuilt given a set of .java files that have
            # changed.
            # jmake determines what files are stale based on timestamps between .java
            # and .class files. Since we use .jars, .srcjars, and md5 checks,
            # timestamp info isn't accurate for this purpose. Rather than use jmake's
            # programatic interface (like we eventually should), we ensure that all
            # .class files are newer than their .java files, and convey to jmake which
            # sources are stale by having their .class files be missing entirely
            # (by not extracting them).
            javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)
            if srcjars:
                _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir)

        if srcjars:
            java_dir = os.path.join(temp_dir, 'java')
            os.makedirs(java_dir)
            for srcjar in options.java_srcjars:
                if changed_paths:
                    changed_paths.update(
                        os.path.join(java_dir, f)
                        for f in changes.IterChangedSubpaths(srcjar))
                build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
            jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
            jar_srcs = _FilterJavaFiles(jar_srcs, options.javac_includes)
            java_files.extend(jar_srcs)
            if changed_paths:
                # Set the mtime of all sources to 0 since we use the absence of .class
                # files to tell jmake which files are stale.
                for path in jar_srcs:
                    os.utime(path, (0, 0))

        if java_files:
            if changed_paths:
                changed_java_files = [
                    p for p in java_files if p in changed_paths
                ]
                if os.path.exists(options.jar_path):
                    _ExtractClassFiles(options.jar_path, classes_dir,
                                       changed_java_files)
                if os.path.exists(excluded_jar_path):
                    _ExtractClassFiles(excluded_jar_path, classes_dir,
                                       changed_java_files)
                # Add the extracted files to the classpath. This is required because
                # when compiling only a subset of files, classes that haven't changed
                # need to be findable.
                classpath_idx = javac_cmd.index('-classpath')
                javac_cmd[classpath_idx + 1] += ':' + classes_dir

            # Can happen when a target goes from having no sources, to having sources.
            # It's created by the call to build_utils.Touch() below.
            if incremental:
                if os.path.exists(pdb_path) and not os.path.getsize(pdb_path):
                    os.unlink(pdb_path)

            # Don't include the output directory in the initial set of args since it
            # being in a temp dir makes it unstable (breaks md5 stamping).
            cmd = javac_cmd + ['-d', classes_dir] + java_files

            # JMake prints out some diagnostic logs that we want to ignore.
            # This assumes that all compiler output goes through stderr.
            stdout_filter = lambda s: ''
            if md5_check.PRINT_EXPLANATIONS:
                stdout_filter = None

            attempt_build = lambda: build_utils.CheckOutput(
                cmd,
                print_stdout=options.chromium_code,
                stdout_filter=stdout_filter,
                stderr_filter=ColorJavacOutput)
            try:
                attempt_build()
            except build_utils.CalledProcessError as e:
                # Work-around for a bug in jmake (http://crbug.com/551449).
                if 'project database corrupted' not in e.output:
                    raise
                print(
                    'Applying work-around for jmake project database corrupted '
                    '(http://crbug.com/551449).')
                os.unlink(pdb_path)
                attempt_build()

        if options.incremental and (not java_files or not incremental):
            # Make sure output exists.
            build_utils.Touch(pdb_path)

        # Split .class files between the main jar and the .excluded.jar based
        # on the jar_excluded_classes globs.
        glob = options.jar_excluded_classes
        inclusion_predicate = lambda f: not build_utils.MatchesGlob(f, glob)
        exclusion_predicate = lambda f: not inclusion_predicate(f)

        jar.JarDirectory(
            classes_dir,
            options.jar_path,
            predicate=inclusion_predicate,
            provider_configurations=options.provider_configurations,
            additional_files=options.additional_jar_files)
        jar.JarDirectory(
            classes_dir,
            excluded_jar_path,
            predicate=exclusion_predicate,
            provider_configurations=options.provider_configurations,
            additional_files=options.additional_jar_files)
Code example #5
0
def _OnStaleMd5(lint_path,
                config_path,
                processed_config_path,
                manifest_path,
                result_path,
                product_dir,
                sources,
                jar_path,
                cache_dir,
                android_sdk_version,
                resource_sources,
                classpath=None,
                can_fail_build=False,
                silent=False):
    """Runs Android lint over the given sources and reports found issues.

    Python 2 code (uses `print >>`). Builds the lint command line, symlinks
    sources/manifest into temp dirs to control what lint sees, runs lint,
    and on failure parses the XML result file to print issues.

    Args:
        lint_path: Path to the lint binary.
        config_path: Lint config file; 'PRODUCT_DIR' placeholders are
            expanded into processed_config_path.
        processed_config_path: Where the expanded config is written.
        manifest_path: AndroidManifest.xml path (symlinked into a temp
            project dir).
        result_path: Where lint writes its XML results.
        product_dir: Build output dir; relativized in configs/results.
        sources: Java source files to lint.
        jar_path: .class-file jar for this target (--classpath).
        cache_dir: Used as java user.home so lint caches there.
        android_sdk_version: If set, a dummy project.properties is created to
            put the Android SDK on lint's classpath.
        resource_sources: Resource dirs or zips of generated resources.
        classpath: Jars of dependency targets (--libraries).
        can_fail_build: Whether lint issues should fail the build.
        silent: Suppress issue output.
    """
    def _RelativizePath(path):
        """Returns relative path to top-level src dir.

    Args:
      path: A path relative to cwd.
    """
        return os.path.relpath(os.path.abspath(path), _SRC_ROOT)

    def _ProcessConfigFile():
        # Expand PRODUCT_DIR in the config, but only when it is out of date.
        if not config_path or not processed_config_path:
            return
        if not build_utils.IsTimeStale(processed_config_path, [config_path]):
            return

        with open(config_path, 'rb') as f:
            content = f.read().replace('PRODUCT_DIR',
                                       _RelativizePath(product_dir))

        with open(processed_config_path, 'wb') as f:
            f.write(content)

    def _ProcessResultFile():
        # Reverse of _ProcessConfigFile: make result paths build-dir agnostic.
        with open(result_path, 'rb') as f:
            content = f.read().replace(_RelativizePath(product_dir),
                                       'PRODUCT_DIR')

        with open(result_path, 'wb') as f:
            f.write(content)

    def _ParseAndShowResultFile():
        # Parses lint's XML output, prints each issue to stderr (unless
        # silent), and returns the issue count.
        dom = minidom.parse(result_path)
        issues = dom.getElementsByTagName('issue')
        if not silent:
            print >> sys.stderr
            for issue in issues:
                issue_id = issue.attributes['id'].value
                message = issue.attributes['message'].value
                location_elem = issue.getElementsByTagName('location')[0]
                path = location_elem.attributes['file'].value
                line = location_elem.getAttribute('line')
                if line:
                    error = '%s:%s %s: %s [warning]' % (path, line, message,
                                                        issue_id)
                else:
                    # Issues in class files don't have a line number.
                    error = '%s %s: %s [warning]' % (path, message, issue_id)
                print >> sys.stderr, error.encode('utf-8')
                for attr in ['errorLine1', 'errorLine2']:
                    error_line = issue.getAttribute(attr)
                    if error_line:
                        print >> sys.stderr, error_line.encode('utf-8')
        return len(issues)

    with build_utils.TempDir() as temp_dir:
        _ProcessConfigFile()

        cmd = [
            _RelativizePath(lint_path),
            '-Werror',
            '--exitcode',
            '--showall',
            '--xml',
            _RelativizePath(result_path),
        ]
        if jar_path:
            # --classpath is just for .class files for this one target.
            cmd.extend(['--classpath', _RelativizePath(jar_path)])
        if processed_config_path:
            cmd.extend(['--config', _RelativizePath(processed_config_path)])

        def _NewTempSubdir(prefix, all_subdirs):
            # Helper function to create a new sub directory based on the number of
            # subdirs created earlier. Path of the directory is appended to the
            # all_subdirs list.
            new_dir = os.path.join(temp_dir, prefix + str(len(all_subdirs)))
            os.mkdir(new_dir)
            all_subdirs.append(new_dir)
            return new_dir

        resource_dirs = []
        for resource_source in resource_sources:
            if os.path.isdir(resource_source):
                resource_dirs.append(resource_source)
            else:
                # This is a zip file with generated resources (e. g. strings from GRD).
                # Extract it to temporary folder.
                resource_dir = _NewTempSubdir('r', resource_dirs)
                build_utils.ExtractAll(resource_source, path=resource_dir)

        for resource_dir in resource_dirs:
            cmd.extend(['--resources', _RelativizePath(resource_dir)])

        if classpath:
            # --libraries is the classpath (excluding active target).
            cp = ':'.join(_RelativizePath(p) for p in classpath)
            cmd.extend(['--libraries', cp])

        # There may be multiple source files with the same basename (but in
        # different directories). It is difficult to determine what part of the path
        # corresponds to the java package, and so instead just link the source files
        # into temporary directories (creating a new one whenever there is a name
        # conflict).
        src_dirs = []

        def PathInDir(d, src):
            return os.path.join(d, os.path.basename(src))

        for src in sources:
            src_dir = None
            for d in src_dirs:
                if not os.path.exists(PathInDir(d, src)):
                    src_dir = d
                    break
            if not src_dir:
                src_dir = _NewTempSubdir('s', src_dirs)
                cmd.extend(['--sources', _RelativizePath(src_dir)])
            os.symlink(os.path.abspath(src), PathInDir(src_dir, src))

        project_dir = _NewTempSubdir('p', [])
        if android_sdk_version:
            # Create dummy project.properies file in a temporary "project" directory.
            # It is the only way to add Android SDK to the Lint's classpath. Proper
            # classpath is necessary for most source-level checks.
            with open(os.path.join(project_dir, 'project.properties'), 'w') \
                as propfile:
                print >> propfile, 'target=android-{}'.format(
                    android_sdk_version)

        # Put the manifest in a temporary directory in order to avoid lint detecting
        # sibling res/ and src/ directories (which should be pass explicitly if they
        # are to be included).
        if manifest_path:
            os.symlink(os.path.abspath(manifest_path),
                       PathInDir(project_dir, manifest_path))
        cmd.append(project_dir)

        if os.path.exists(result_path):
            os.remove(result_path)

        env = {}
        stderr_filter = None
        if cache_dir:
            env['_JAVA_OPTIONS'] = '-Duser.home=%s' % _RelativizePath(
                cache_dir)
            # When _JAVA_OPTIONS is set, java prints to stderr:
            # Picked up _JAVA_OPTIONS: ...
            #
            # We drop all lines that contain _JAVA_OPTIONS from the output
            stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)

        try:
            build_utils.CheckOutput(cmd,
                                    cwd=_SRC_ROOT,
                                    env=env or None,
                                    stderr_filter=stderr_filter)
        except build_utils.CalledProcessError:
            # There is a problem with lint usage
            if not os.path.exists(result_path):
                raise

            # Sometimes produces empty (almost) files:
            if os.path.getsize(result_path) < 10:
                if can_fail_build:
                    raise
                elif not silent:
                    traceback.print_exc()
                return

            # NOTE(review): the parse-and-show block below appears twice
            # (here and again after the traceback); when parsing succeeds,
            # issues are printed to stderr twice and this first num_issues is
            # discarded. Looks like an accidental duplication — confirm
            # against upstream before removing either copy.
            # There are actual lint issues
            try:
                num_issues = _ParseAndShowResultFile()
            except Exception:  # pylint: disable=broad-except
                if not silent:
                    print 'Lint created unparseable xml file...'
                    print 'File contents:'
                    with open(result_path) as f:
                        print f.read()
                if not can_fail_build:
                    return

            if can_fail_build and not silent:
                traceback.print_exc()

            # There are actual lint issues
            try:
                num_issues = _ParseAndShowResultFile()
            except Exception:  # pylint: disable=broad-except
                if not silent:
                    print 'Lint created unparseable xml file...'
                    print 'File contents:'
                    with open(result_path) as f:
                        print f.read()
                raise

            _ProcessResultFile()
            msg = (
                '\nLint found %d new issues.\n'
                ' - For full explanation, please refer to %s\n'
                ' - For more information about lint and how to fix lint issues,'
                ' please refer to %s\n' %
                (num_issues, _RelativizePath(result_path), _LINT_MD_URL))
            if not silent:
                print >> sys.stderr, msg
            if can_fail_build:
                raise Exception('Lint failed.')
Code example #6
0
File: lint.py  Project: bopopescu/Emma
def _OnStaleMd5(changes,
                lint_path,
                config_path,
                processed_config_path,
                manifest_path,
                result_path,
                product_dir,
                sources,
                jar_path,
                resource_dir=None,
                can_fail_build=False):
    """Runs Android lint over the given sources (older, simpler variant).

    Python 2 code (uses `print >>`). Unlike the newer variant, this one can
    restrict linting to only changed sources (when no resource_dir is set)
    and always requires config/jar/manifest paths.

    Args:
        changes: md5_check change info; used to narrow sources to only the
            changed files.
        lint_path: Path to the lint binary.
        config_path: Lint config file with PRODUCT_DIR placeholders.
        processed_config_path: Where the expanded config is written.
        manifest_path: AndroidManifest.xml; its parent dir is passed to lint.
        result_path: Where lint writes its XML results.
        product_dir: Build output dir; relativized in configs/results.
        sources: Java source files to lint.
        jar_path: .class-file jar for this target (--classpath).
        resource_dir: Optional resources dir (--resources).
        can_fail_build: Whether lint issues should fail the build.
    """
    def _RelativizePath(path):
        """Returns relative path to top-level src dir.

    Args:
      path: A path relative to cwd.
    """
        return os.path.relpath(os.path.abspath(path), _SRC_ROOT)

    def _ProcessConfigFile():
        # Expand PRODUCT_DIR in the config, but only when it is out of date.
        if not build_utils.IsTimeStale(processed_config_path, [config_path]):
            return

        with open(config_path, 'rb') as f:
            content = f.read().replace('PRODUCT_DIR',
                                       _RelativizePath(product_dir))

        with open(processed_config_path, 'wb') as f:
            f.write(content)

    def _ProcessResultFile():
        # Reverse of _ProcessConfigFile: make result paths build-dir agnostic.
        with open(result_path, 'rb') as f:
            content = f.read().replace(_RelativizePath(product_dir),
                                       'PRODUCT_DIR')

        with open(result_path, 'wb') as f:
            f.write(content)

    def _ParseAndShowResultFile():
        # Parses lint's XML output, prints each issue to stderr, and returns
        # the issue count.
        dom = minidom.parse(result_path)
        issues = dom.getElementsByTagName('issue')
        print >> sys.stderr
        for issue in issues:
            issue_id = issue.attributes['id'].value
            message = issue.attributes['message'].value
            location_elem = issue.getElementsByTagName('location')[0]
            path = location_elem.attributes['file'].value
            line = location_elem.getAttribute('line')
            if line:
                error = '%s:%s %s: %s [warning]' % (path, line, message,
                                                    issue_id)
            else:
                # Issues in class files don't have a line number.
                error = '%s %s: %s [warning]' % (path, message, issue_id)
            print >> sys.stderr, error.encode('utf-8')
            for attr in ['errorLine1', 'errorLine2']:
                error_line = issue.getAttribute(attr)
                if error_line:
                    print >> sys.stderr, error_line.encode('utf-8')
        return len(issues)

    # Need to include all sources when a resource_dir is set so that resources are
    # not marked as unused.
    if not resource_dir and changes.AddedOrModifiedOnly():
        changed_paths = set(changes.IterChangedPaths())
        sources = [s for s in sources if s in changed_paths]

    with build_utils.TempDir() as temp_dir:
        _ProcessConfigFile()

        cmd = [
            _RelativizePath(lint_path),
            '-Werror',
            '--exitcode',
            '--showall',
            '--config',
            _RelativizePath(processed_config_path),
            '--classpath',
            _RelativizePath(jar_path),
            '--xml',
            _RelativizePath(result_path),
        ]
        if resource_dir:
            cmd.extend(['--resources', _RelativizePath(resource_dir)])

        # There may be multiple source files with the same basename (but in
        # different directories). It is difficult to determine what part of the path
        # corresponds to the java package, and so instead just link the source files
        # into temporary directories (creating a new one whenever there is a name
        # conflict).
        src_dirs = []

        def NewSourceDir():
            # Creates a numbered temp source dir and registers it with lint.
            new_dir = os.path.join(temp_dir, str(len(src_dirs)))
            os.mkdir(new_dir)
            src_dirs.append(new_dir)
            cmd.extend(['--sources', _RelativizePath(new_dir)])
            return new_dir

        def PathInDir(d, src):
            return os.path.join(d, os.path.basename(src))

        for src in sources:
            # Find (or create) the first dir without a basename collision.
            src_dir = None
            for d in src_dirs:
                if not os.path.exists(PathInDir(d, src)):
                    src_dir = d
                    break
            if not src_dir:
                src_dir = NewSourceDir()
            os.symlink(os.path.abspath(src), PathInDir(src_dir, src))

        cmd.append(_RelativizePath(os.path.join(manifest_path, os.pardir)))

        if os.path.exists(result_path):
            os.remove(result_path)

        try:
            build_utils.CheckOutput(cmd, cwd=_SRC_ROOT)
        except build_utils.CalledProcessError:
            if can_fail_build:
                traceback.print_exc()

            # There is a problem with lint usage
            if not os.path.exists(result_path):
                raise

            # There are actual lint issues
            else:
                try:
                    num_issues = _ParseAndShowResultFile()
                except Exception:  # pylint: disable=broad-except
                    print 'Lint created unparseable xml file...'
                    print 'File contents:'
                    with open(result_path) as f:
                        print f.read()
                    raise

                _ProcessResultFile()
                msg = ('\nLint found %d new issues.\n'
                       ' - For full explanation refer to %s\n'
                       ' - Wanna suppress these issues?\n'
                       '    1. Read comment in %s\n'
                       '    2. Run "python %s %s"\n' %
                       (num_issues, _RelativizePath(result_path),
                        _RelativizePath(config_path),
                        _RelativizePath(
                            os.path.join(
                                _SRC_ROOT, 'build', 'android', 'lint',
                                'suppress.py')), _RelativizePath(result_path)))
                print >> sys.stderr, msg
                if can_fail_build:
                    raise Exception('Lint failed.')
Code example #7
0
File: dex.py  Project: xproject2020/BlockChainBrowser
def main(args):
    """Dexes the input jars/class files with d8 and writes the result to
    options.dex_path, optionally re-laying-out the dex with a profile.

    Also writes a depfile so the build system can track implicit inputs.
    """
    options, paths = _ParseArgs(args)
    # When ProGuard ran for this configuration, its single output jar replaces
    # the regular class inputs. Note the flags arrive as the strings
    # 'true'/'false', not booleans.
    if ((options.proguard_enabled == 'true'
         and options.configuration_name == 'Release')
            or (options.debug_build_proguard_enabled == 'true'
                and options.configuration_name == 'Debug')):
        paths = [options.proguard_enabled_input_path]

    if options.inputs:
        paths += options.inputs

    if options.excluded_paths:
        # Excluded paths are relative to the output directory.
        exclude_paths = options.excluded_paths
        paths = [
            p for p in paths if
            not os.path.relpath(p, options.output_directory) in exclude_paths
        ]

    # Snapshot the final input list for the depfile before it is turned into
    # d8 flags below.
    input_paths = list(paths)
    if options.multi_dex:
        input_paths.append(options.main_dex_list_path)

    # Assemble the d8 command line from the parsed options.
    dex_cmd = ['java', '-jar', options.d8_jar_path]
    options.sdk_jars = build_utils.ParseGnList(options.sdk_jars)
    options.classpath = build_utils.ParseGnList(options.classpath)
    for path in options.classpath:
        dex_cmd += ['--classpath', path]
    for path in options.sdk_jars:
        dex_cmd += ['--lib', path]
    if options.multi_dex:
        dex_cmd += ['--main-dex-list', options.main_dex_list_path]
    if options.release:
        dex_cmd += ['--release']
    if options.min_api:
        dex_cmd += ['--min-api', options.min_api]

    # The output container format is inferred from the requested extension.
    is_dex = options.dex_path.endswith('.dex')
    is_jar = options.dex_path.endswith('.jar')

    with build_utils.TempDir() as tmp_dir:
        tmp_dex_dir = os.path.join(tmp_dir, 'tmp_dex_dir')
        os.mkdir(tmp_dex_dir)
        if is_jar and _NoClassFiles(paths):
            # Handle case where no classfiles are specified in inputs
            # by creating an empty JAR
            with zipfile.ZipFile(options.dex_path, 'w') as outfile:
                outfile.comment = 'empty'
        else:
            # .dex files can't specify a name for D8. Instead, we output them to a
            # temp directory then move them after the command has finished running
            # (see _MoveTempDexFile). For other files, tmp_dex_dir is None.
            _RunD8(dex_cmd, paths, tmp_dex_dir)

        tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output')
        if is_dex:
            _MoveTempDexFile(tmp_dex_dir, tmp_dex_output)
        else:
            # d8 supports outputting to a .zip, but does not have deterministic file
            # ordering: https://issuetracker.google.com/issues/119945929
            build_utils.ZipDir(tmp_dex_output, tmp_dex_dir)

        if options.dexlayout_profile:
            # Translate the layout profile through the ProGuard mapping (when
            # available) so its symbols match the dexed output.
            if options.proguard_mapping_path is not None:
                matching_profile = os.path.join(tmp_dir, 'obfuscated_profile')
                convert_dex_profile.ObfuscateProfile(
                    options.dexlayout_profile, tmp_dex_output,
                    options.proguard_mapping_path, options.dexdump_path,
                    matching_profile)
            else:
                logging.warning('No obfuscation for %s',
                                options.dexlayout_profile)
                matching_profile = options.dexlayout_profile
            binary_profile = _CreateBinaryProfile(matching_profile,
                                                  tmp_dex_output,
                                                  options.profman_path,
                                                  tmp_dir)
            output_files = _LayoutDex(binary_profile, tmp_dex_output,
                                      options.dexlayout_path, tmp_dir)
            target = None
            if len(output_files) > 1:
                # Multiple dex files: pack them into one multidex zip.
                target = _ZipMultidex(tmp_dir, output_files)
            else:
                output = output_files[0]
                if not zipfile.is_zipfile(output):
                    # Single raw .dex: wrap it in a zip for the move below.
                    target = os.path.join(tmp_dir, 'dex_classes.zip')
                    _ZipSingleDex(output, target)
                else:
                    target = output
            # NOTE(review): `target` may already be an absolute path under
            # tmp_dir, in which case os.path.join is a no-op — presumably
            # _ZipMultidex/_LayoutDex return tmp_dir-relative names; confirm.
            shutil.move(os.path.join(tmp_dir, target), tmp_dex_output)

        # The dex file is complete and can be moved out of tmp_dir.
        shutil.move(tmp_dex_output, options.dex_path)

    build_utils.WriteDepfile(options.depfile,
                             options.dex_path,
                             input_paths,
                             add_pydeps=False)
コード例 #8
0
ファイル: javac.py プロジェクト: sencha/chromium-spacewalk
def main(argv):
  """Compiles .java sources (plus sources extracted from srcjars) with javac
  and optionally jars the resulting .class files and/or copies them to a
  persistent classes directory.
  """
  colorama.init()

  argv = build_utils.ExpandFileArgs(argv)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option(
      '--src-gendirs',
      help='Directories containing generated java files.')
  parser.add_option(
      '--java-srcjars',
      action='append',
      default=[],
      help='List of srcjars to include in compilation.')
  parser.add_option(
      '--classpath',
      action='append',
      help='Classpath for javac. If this is specified multiple times, they '
      'will all be appended to construct the classpath.')
  parser.add_option(
      '--javac-includes',
      help='A list of file patterns. If provided, only java files that match'
      'one of the patterns will be compiled.')
  parser.add_option(
      '--jar-excluded-classes',
      default='',
      help='List of .class file patterns to exclude from the jar.')

  parser.add_option(
      '--chromium-code',
      type='int',
      help='Whether code being compiled should be built with stricter '
      'warnings for chromium code.')

  parser.add_option(
      '--classes-dir',
      help='Directory for compiled .class files.')
  parser.add_option('--jar-path', help='Jar output path.')

  parser.add_option('--stamp', help='Path to touch on success.')

  options, args = parser.parse_args(argv)

  # Repeated flags carry GYP-style lists; flatten them all.
  classpath = []
  for arg in options.classpath:
    classpath += build_utils.ParseGypList(arg)

  java_srcjars = []
  for arg in options.java_srcjars:
    java_srcjars += build_utils.ParseGypList(arg)

  # Positional arguments are the explicit .java source files.
  java_files = args
  if options.src_gendirs:
    src_gendirs = build_utils.ParseGypList(options.src_gendirs)
    java_files += build_utils.FindInDirectories(src_gendirs, '*.java')

  input_files = classpath + java_srcjars + java_files
  with build_utils.TempDir() as temp_dir:
    classes_dir = os.path.join(temp_dir, 'classes')
    os.makedirs(classes_dir)
    if java_srcjars:
      # Extract srcjars into the temp dir and compile their sources too.
      java_dir = os.path.join(temp_dir, 'java')
      os.makedirs(java_dir)
      for srcjar in java_srcjars:
        build_utils.ExtractAll(srcjar, path=java_dir)
      java_files += build_utils.FindInDirectory(java_dir, '*.java')

    if options.javac_includes:
      # Keep only sources matching at least one include pattern.
      javac_includes = build_utils.ParseGypList(options.javac_includes)
      filtered_java_files = []
      for f in java_files:
        for include in javac_includes:
          if fnmatch.fnmatch(f, include):
            filtered_java_files.append(f)
            break
      java_files = filtered_java_files

    DoJavac(
        classpath,
        classes_dir,
        options.chromium_code,
        java_files)

    if options.jar_path:
      jar.JarDirectory(classes_dir,
                       build_utils.ParseGypList(options.jar_excluded_classes),
                       options.jar_path)

    if options.classes_dir:
      # Delete the old classes directory. This ensures that all .class files in
      # the output are actually from the input .java files. For example, if a
      # .java file is deleted or an inner class is removed, the classes
      # directory should not contain the corresponding old .class file after
      # running this action.
      build_utils.DeleteDirectory(options.classes_dir)
      shutil.copytree(classes_dir, options.classes_dir)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        input_files + build_utils.GetPythonDependencies())

  if options.stamp:
    build_utils.Touch(options.stamp)
コード例 #9
0
ファイル: javac.py プロジェクト: spWang/buildroot
def main(argv):
    """Compiles .java sources (plus sources extracted from srcjars) with
    javac or Errorprone, then optionally jars the .class files (with an
    optional manifest and source jar) and/or mirrors them to --classes-dir.
    """
    colorama.init()

    argv = build_utils.ExpandFileArgs(argv)

    parser = optparse.OptionParser()
    build_utils.AddDepfileOption(parser)

    parser.add_option('--src-gendirs',
                      help='Directories containing generated java files.')
    parser.add_option('--java-srcjars',
                      action='append',
                      default=[],
                      help='List of srcjars to include in compilation.')
    parser.add_option(
        '--bootclasspath',
        action='append',
        default=[],
        help='Boot classpath for javac. If this is specified multiple times, '
        'they will all be appended to construct the classpath.')
    parser.add_option(
        '--classpath',
        action='append',
        help='Classpath for javac. If this is specified multiple times, they '
        'will all be appended to construct the classpath.')
    parser.add_option(
        '--javac-includes',
        help='A list of file patterns. If provided, only java files that match'
        'one of the patterns will be compiled.')
    parser.add_option(
        '--jar-excluded-classes',
        default='',
        help='List of .class file patterns to exclude from the jar.')

    parser.add_option(
        '--chromium-code',
        type='int',
        help='Whether code being compiled should be built with stricter '
        'warnings for chromium code.')

    parser.add_option('--use-errorprone-path',
                      help='Use the Errorprone compiler at this path.')

    parser.add_option('--classes-dir',
                      help='Directory for compiled .class files.')
    parser.add_option('--jar-path', help='Jar output path.')
    parser.add_option('--jar-source-path', help='Source jar output path.')
    parser.add_option(
        '--jar-source-base-dir',
        help=
        'Base directory for the source files included in the output source jar.'
    )
    parser.add_option('--main-class',
                      help='The class containing the main method.')
    parser.add_option('--manifest-entry',
                      action='append',
                      help='Key:value pairs to add to the .jar manifest.')

    parser.add_option('--stamp', help='Path to touch on success.')

    options, args = parser.parse_args(argv)

    if options.main_class and not options.jar_path:
        parser.error('--main-class requires --jar-path')

    # Repeated flags carry GYP-style lists; flatten them all.
    bootclasspath = []
    for arg in options.bootclasspath:
        bootclasspath += build_utils.ParseGypList(arg)

    classpath = []
    for arg in options.classpath:
        classpath += build_utils.ParseGypList(arg)

    java_srcjars = []
    for arg in options.java_srcjars:
        java_srcjars += build_utils.ParseGypList(arg)

    # Positional arguments are the explicit .java source files.
    java_files = args
    if options.src_gendirs:
        src_gendirs = build_utils.ParseGypList(options.src_gendirs)
        java_files += build_utils.FindInDirectories(src_gendirs, '*.java')

    input_files = bootclasspath + classpath + java_srcjars + java_files
    with build_utils.TempDir() as temp_dir:
        classes_dir = os.path.join(temp_dir, 'classes')
        os.makedirs(classes_dir)
        if java_srcjars:
            # Extract srcjar sources into the temp dir and compile them too.
            java_dir = os.path.join(temp_dir, 'java')
            os.makedirs(java_dir)
            for srcjar in java_srcjars:
                build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
            java_files += build_utils.FindInDirectory(java_dir, '*.java')

        if options.javac_includes:
            # Keep only sources matching at least one include pattern.
            javac_includes = build_utils.ParseGypList(options.javac_includes)
            filtered_java_files = []
            for f in java_files:
                for include in javac_includes:
                    if fnmatch.fnmatch(f, include):
                        filtered_java_files.append(f)
                        break
            java_files = filtered_java_files

        if len(java_files) != 0:
            DoJavac(bootclasspath, classpath, classes_dir,
                    options.chromium_code, options.use_errorprone_path,
                    java_files)

        if options.jar_path:
            if options.main_class or options.manifest_entry:
                if options.manifest_entry:
                    # NOTE(review): under Python 2 `map` yields a list; if this
                    # file migrates to Python 3, CreateManifest would receive
                    # an iterator — confirm it accepts one.
                    entries = map(lambda e: e.split(":"),
                                  options.manifest_entry)
                else:
                    entries = []
                manifest_file = os.path.join(temp_dir, 'manifest')
                CreateManifest(manifest_file, classpath, options.main_class,
                               entries)
            else:
                manifest_file = None
            jar.JarDirectory(classes_dir,
                             build_utils.ParseGypList(
                                 options.jar_excluded_classes),
                             options.jar_path,
                             manifest_file=manifest_file)

            if options.jar_source_path:
                jar.Jar(java_files, options.jar_source_base_dir,
                        options.jar_source_path)

        if options.classes_dir:
            # Delete the old classes directory. This ensures that all .class files in
            # the output are actually from the input .java files. For example, if a
            # .java file is deleted or an inner class is removed, the classes
            # directory should not contain the corresponding old .class file after
            # running this action.
            build_utils.DeleteDirectory(options.classes_dir)
            shutil.copytree(classes_dir, options.classes_dir)

    if options.depfile:
        build_utils.WriteDepfile(
            options.depfile, input_files + build_utils.GetPythonDependencies())

    if options.stamp:
        build_utils.Touch(options.stamp)
コード例 #10
0
def main(argv):
    """Generates Java sources from .proto files via protoc.

    Output goes either to --java-out-dir (whose previous contents are
    replaced) or into a srcjar at --srcjar. The generator is selected by
    exactly one of --nano / --protoc-javalite-plugin-dir.
    """
    parser = optparse.OptionParser()
    build_utils.AddDepfileOption(parser)
    parser.add_option("--protoc", help="Path to protoc binary.")
    parser.add_option("--proto-path", help="Path to proto directory.")
    parser.add_option("--java-out-dir",
                      help="Path to output directory for java files.")
    parser.add_option("--srcjar", help="Path to output srcjar.")
    parser.add_option("--stamp", help="File to touch on success.")
    parser.add_option("--nano",
                      help="Use to generate nano protos.",
                      action='store_true')
    parser.add_option("--protoc-javalite-plugin-dir",
                      help="Path to protoc java lite plugin directory.")
    parser.add_option(
        "--import-dir",
        action="append",
        default=[],
        help="Extra import directory for protos, can be repeated.")
    options, args = parser.parse_args(argv)

    build_utils.CheckOptions(options, parser, ['protoc', 'proto_path'])
    if not (options.java_out_dir or options.srcjar):
        print('One of --java-out-dir or --srcjar must be specified.')
        return 1
    if not (options.nano or options.protoc_javalite_plugin_dir):
        print(
            'One of --nano or --protoc-javalite-plugin-dir must be specified.')
        return 1

    # Each --import-dir becomes an additional --proto_path flag.
    path_flags = ['--proto_path', options.proto_path]
    for extra_dir in options.import_dir:
        path_flags.extend(['--proto_path', extra_dir])

    with build_utils.TempDir() as staging_dir:
        if options.nano:
            # Specify arguments to the generator.
            out_flag = ('--javanano_out=' + ','.join([
                'optional_field_style=reftypes', 'store_unknown_fields=true'
            ]) + ':' + staging_dir)
        else:
            out_flag = '--javalite_out=' + staging_dir

        env = os.environ.copy()
        if options.protoc_javalite_plugin_dir:
            # If we are generating lite protos, then the lite plugin needs to be in
            # the path when protoc is called. See
            # https://github.com/protocolbuffers/protobuf/blob/master/java/lite.md
            env['PATH'] = '{}:{}'.format(
                os.path.abspath(options.protoc_javalite_plugin_dir),
                env['PATH'])

        # Generate Java files using protoc.
        build_utils.CheckOutput(
            [options.protoc] + path_flags + [out_flag] + args, env=env)

        if options.java_out_dir:
            # Replace the output directory wholesale so stale files disappear.
            build_utils.DeleteDirectory(options.java_out_dir)
            shutil.copytree(staging_dir, options.java_out_dir)
        else:
            build_utils.ZipDir(options.srcjar, staging_dir)

    if options.depfile:
        assert options.srcjar
        build_utils.WriteDepfile(options.depfile,
                                 options.srcjar,
                                 args + [options.protoc],
                                 add_pydeps=False)

    if options.stamp:
        build_utils.Touch(options.stamp)
コード例 #11
0
ファイル: proguard.py プロジェクト: frida/v8-build
def _OptimizeWithR8(options,
                    config_paths,
                    libraries,
                    dynamic_config_data,
                    print_stdout=False):
    """Runs the R8 shrinker/optimizer over the input jars.

    Supports feature splits (each feature gets its own staging dir via
    --feature), optional desugared JDK library dexing via L8, and writes a
    comment-stripped ProGuard mapping file to options.mapping_output.

    Args:
      options: Parsed command-line options.
      config_paths: ProGuard config file paths, passed via --pg-conf.
      libraries: Library jars passed via --lib (not optimized themselves).
      dynamic_config_data: Extra ProGuard config text generated at build time;
          written to a temp file and appended to config_paths.
      print_stdout: Whether to echo R8's stdout.
    """
    with build_utils.TempDir() as tmp_dir:
        if dynamic_config_data:
            tmp_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
            with open(tmp_config_path, 'w') as f:
                f.write(dynamic_config_data)
            config_paths = config_paths + [tmp_config_path]

        tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
        # If there is no output (no classes are kept), this prevents this script
        # from failing.
        build_utils.Touch(tmp_mapping_path)

        tmp_output = os.path.join(tmp_dir, 'r8out')
        os.mkdir(tmp_output)

        # Build one _DexPathContext per feature module; the 'base' feature (or
        # the sole module when features are not used) is tracked separately.
        feature_contexts = []
        if options.feature_names:
            for name, dest_dex, input_paths in zip(options.feature_names,
                                                   options.dex_dests,
                                                   options.feature_jars):
                feature_context = _DexPathContext(name, dest_dex, input_paths,
                                                  tmp_output)
                if name == 'base':
                    base_dex_context = feature_context
                else:
                    feature_contexts.append(feature_context)
        else:
            base_dex_context = _DexPathContext('base', options.output_path,
                                               options.input_paths, tmp_output)

        cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
            '-Dcom.android.tools.r8.allowTestProguardOptions=1',
            '-Dcom.android.tools.r8.verticalClassMerging=1',
        ]
        if options.disable_outlining:
            cmd += ['-Dcom.android.tools.r8.disableOutlining=1']
        cmd += [
            '-cp',
            options.r8_path,
            'com.android.tools.r8.R8',
            '--no-data-resources',
            '--output',
            base_dex_context.staging_dir,
            '--pg-map-output',
            tmp_mapping_path,
        ]

        if options.disable_checks:
            # Info level priority logs are not printed by default.
            cmd += [
                '--map-diagnostics:CheckDiscardDiagnostic', 'error', 'info'
            ]

        if options.desugar_jdk_libs_json:
            cmd += [
                '--desugared-lib',
                options.desugar_jdk_libs_json,
                '--desugared-lib-pg-conf-output',
                options.desugared_library_keep_rule_output,
            ]

        if options.min_api:
            cmd += ['--min-api', options.min_api]

        if options.force_enable_assertions:
            cmd += ['--force-enable-assertions']

        for lib in libraries:
            cmd += ['--lib', lib]

        for config_file in config_paths:
            cmd += ['--pg-conf', config_file]

        if options.main_dex_rules_path:
            for main_dex_rule in options.main_dex_rules_path:
                cmd += ['--main-dex-rules', main_dex_rule]

        # Group each feature's input jars under its parent (per uses_split).
        base_jars = set(base_dex_context.input_paths)
        input_path_map = defaultdict(set)
        for feature in feature_contexts:
            parent = options.uses_split.get(feature.name, feature.name)
            input_path_map[parent].update(feature.input_paths)

        # If a jar is present in multiple features, it should be moved to the base
        # module.
        all_feature_jars = set()
        for input_paths in input_path_map.values():
            base_jars.update(all_feature_jars.intersection(input_paths))
            all_feature_jars.update(input_paths)

        module_input_jars = base_jars.copy()
        for feature in feature_contexts:
            input_paths = input_path_map.get(feature.name)
            # Input paths can be missing for a child feature present in the uses_split
            # map. These features get their input paths added to the parent, and are
            # split out later with DexSplitter.
            if input_paths is None:
                continue
            feature_input_jars = [
                p for p in input_paths if p not in module_input_jars
            ]
            module_input_jars.update(feature_input_jars)
            for in_jar in feature_input_jars:
                cmd += ['--feature', in_jar, feature.staging_dir]

        cmd += sorted(base_jars)
        # Add any extra input jars to the base module (e.g. desugar runtime).
        extra_jars = set(options.input_paths) - module_input_jars
        cmd += sorted(extra_jars)

        try:
            stderr_filter = dex.CreateStderrFilter(
                options.show_desugar_default_interface_warnings)
            logging.debug('Running R8')
            build_utils.CheckOutput(cmd,
                                    print_stdout=print_stdout,
                                    stderr_filter=stderr_filter,
                                    fail_on_output=options.warnings_as_errors)
        except build_utils.CalledProcessError as err:
            # Re-raise with a pointer to the debugging docs appended.
            debugging_link = ('\n\nR8 failed. Please see {}.'.format(
                'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
                'android/docs/java_optimization.md#Debugging-common-failures\n'
            ))
            raise build_utils.CalledProcessError(err.cwd, err.args,
                                                 err.output + debugging_link)

        if options.uses_split:
            _SplitChildFeatures(options, feature_contexts, tmp_dir,
                                tmp_mapping_path, print_stdout)

        base_has_imported_lib = False
        if options.desugar_jdk_libs_json:
            # Dex the desugared JDK library with L8 and append it as the next
            # classesN.dex in the base module's staging dir.
            logging.debug('Running L8')
            existing_files = build_utils.FindInDirectory(
                base_dex_context.staging_dir)
            jdk_dex_output = os.path.join(
                base_dex_context.staging_dir,
                'classes%d.dex' % (len(existing_files) + 1))
            base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
                options.r8_path, options.min_api,
                options.desugar_jdk_libs_json, options.desugar_jdk_libs_jar,
                options.desugar_jdk_libs_configuration_jar,
                options.desugared_library_keep_rule_output, jdk_dex_output,
                options.warnings_as_errors)

        logging.debug('Collecting ouputs')
        base_dex_context.CreateOutput(
            base_has_imported_lib, options.desugared_library_keep_rule_output)
        for feature in feature_contexts:
            feature.CreateOutput()

        with open(options.mapping_output, 'w') as out_file, \
            open(tmp_mapping_path) as in_file:
            # Mapping files generated by R8 include comments that may break
            # some of our tooling so remove those (specifically: apkanalyzer).
            out_file.writelines(l for l in in_file if not l.startswith('#'))
コード例 #12
0
def main(args):
  """Shrinks/obfuscates the input jars with either R8 or ProGuard.

  The R8 path writes the dexed/optimized output, a comment-stripped mapping
  file, the merged ProGuard config, and a depfile. The legacy path drives
  ProGuard through proguard_util.ProguardCmdBuilder. (Python 2: uses
  cStringIO.)
  """
  args = build_utils.ExpandFileArgs(args)
  options = _ParseOptions(args)

  libraries = []
  for p in options.classpath:
    # If a jar is part of input no need to include it as library jar.
    if p not in libraries and p not in options.input_paths:
      libraries.append(p)

  # TODO(agrieve): Remove proguard usages.
  if options.r8_path:
    temp_config_string = ''
    with build_utils.TempDir() as tmp_dir:
      tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
      tmp_proguard_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
      # If there is no output (no classes are kept), this prevents this script
      # from failing.
      build_utils.Touch(tmp_mapping_path)

      # Merge all ProGuard configs into a single string (also written to
      # options.output_config below).
      f = cStringIO.StringIO()
      proguard_util.WriteFlagsFile(
          options.proguard_configs, f, exclude_generated=True)
      merged_configs = f.getvalue()
      # Fix up line endings (third_party configs can have windows endings)
      merged_configs = merged_configs.replace('\r', '')
      f.close()
      # -whyareyoukeeping output is only useful when echoed to stdout.
      print_stdout = '-whyareyoukeeping' in merged_configs

      def run_r8(cmd):
        # Runs the given R8 command, optionally disabling outlining via
        # _JAVA_OPTIONS (and filtering the resulting JVM noise from stderr).
        stderr_filter = None
        env = os.environ.copy()
        if options.disable_outlining:
          stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)
          env['_JAVA_OPTIONS'] = '-Dcom.android.tools.r8.disableOutlining=1'
        build_utils.CheckOutput(
            cmd,
            env=env,
            print_stdout=print_stdout,
            stderr_filter=stderr_filter)

      if options.output_path.endswith('.dex'):
        # Single-dex output: R8 writes into a temp dir, then the file is
        # moved/renamed to the requested path.
        with build_utils.TempDir() as tmp_dex_dir:
          cmd, temp_config_string = _CreateR8Command(
              options, tmp_mapping_path, tmp_dex_dir, tmp_proguard_config_path,
              libraries)
          run_r8(cmd)
          _MoveTempDexFile(tmp_dex_dir, options.output_path)
      else:
        cmd, temp_config_string = _CreateR8Command(
            options, tmp_mapping_path, options.output_path,
            tmp_proguard_config_path, libraries)
        run_r8(cmd)

      # Copy output files to correct locations.
      with build_utils.AtomicOutput(options.mapping_output) as mapping:
        # Mapping files generated by R8 include comments that may break
        # some of our tooling so remove those.
        with open(tmp_mapping_path) as tmp:
          mapping.writelines(l for l in tmp if not l.startswith('#'))

      for output in build_utils.ParseGnList(options.extra_mapping_output_paths):
        shutil.copy(tmp_mapping_path, output)


    # Write the merged config (plus any generated rules) for inspection.
    with build_utils.AtomicOutput(options.output_config) as f:
      f.write(merged_configs)
      if temp_config_string:
        f.write(_GENERATED_PROGUARD_HEADER)
        f.write(temp_config_string)

    if options.expected_configs_file:
      _VerifyExpectedConfigs(options.expected_configs_file,
                             options.output_config,
                             options.verify_expected_configs)

    other_inputs = []
    if options.apply_mapping:
      other_inputs += options.apply_mapping

    build_utils.WriteDepfile(
        options.depfile,
        options.output_path,
        inputs=options.proguard_configs + options.input_paths + libraries +
        other_inputs,
        add_pydeps=False)
  else:
    # Legacy ProGuard path.
    proguard = proguard_util.ProguardCmdBuilder(options.proguard_path)
    proguard.injars(options.input_paths)
    proguard.configs(options.proguard_configs)
    proguard.config_exclusions(options.proguard_config_exclusions)
    proguard.outjar(options.output_path)
    proguard.mapping_output(options.mapping_output)
    proguard.libraryjars(libraries)
    proguard.verbose(options.verbose)
    proguard.min_api(options.min_api)
    # Do not consider the temp file as an input since its name is random.
    input_paths = proguard.GetInputs()

    with tempfile.NamedTemporaryFile() as f:
      if options.apply_mapping:
        input_paths.append(options.apply_mapping)
        # Maintain only class name mappings in the .mapping file in order to
        # work around what appears to be a ProGuard bug in -applymapping:
        #     method 'int close()' is not being kept as 'a', but remapped to 'c'
        _RemoveMethodMappings(options.apply_mapping, f)
        proguard.mapping(f.name)
      with build_utils.TempDir() as d:
        proguard.tmp_dir(d)
        input_strings = proguard.build()
        # Replace the random temp-file name with a stable placeholder so the
        # staleness check is not invalidated on every run.
        if f.name in input_strings:
          input_strings[input_strings.index(f.name)] = '$M'

        build_utils.CallAndWriteDepfileIfStale(
            proguard.CheckOutput,
            options,
            input_paths=input_paths,
            input_strings=input_strings,
            output_paths=proguard.GetOutputs(),
            depfile_deps=proguard.GetDepfileDeps(),
            add_pydeps=False)
コード例 #13
0
ファイル: compile_resources.py プロジェクト: supbey/chromium
def _CreateNormalizedManifestForVerification(options):
  """Builds the fixed-up manifest in a scratch dir and returns it normalized.

  Used to diff the effective AndroidManifest.xml against expectations.
  """
  with build_utils.TempDir() as scratch_dir:
    manifest_path, _ = _FixManifest(
        options, scratch_dir, extra_manifest=options.extra_verification_manifest)
    with open(manifest_path) as manifest_file:
      manifest_text = manifest_file.read()
    return manifest_utils.NormalizeManifest(manifest_text)
コード例 #14
0
 def test_GetRTxtStringResourceNames(self):
   """GetRTxtStringResourceNames extracts string resource names from R.txt."""
   with build_utils.TempDir() as work_dir:
     r_txt_path = _CreateTestFile(work_dir, "test_R.txt", _TEST_R_TXT)
     actual_names = resource_utils.GetRTxtStringResourceNames(r_txt_path)
     self.assertListEqual(actual_names, _TEST_R_TXT_STRING_RESOURCE_NAMES)
コード例 #15
0
def main(args):
    """Builds (and optionally signs) an Android App Bundle with bundletool.

    Also optionally concatenates per-module R.txt files and resource path
    maps into single outputs.
    """
    args = build_utils.ExpandFileArgs(args)
    options = _ParseArgs(args)

    split_dimensions = []
    if options.split_dimensions:
        split_dimensions = [x.upper() for x in options.split_dimensions]

    with build_utils.TempDir() as tmp_dir:
        # Rewrite each module zip for asset targeting before bundling.
        module_zips = [
            _SplitModuleForAssetTargeting(module, tmp_dir, split_dimensions) \
            for module in options.module_zips]

        base_master_resource_ids = None
        if options.base_module_rtxt_path:
            base_master_resource_ids = _GenerateBaseResourcesAllowList(
                options.base_module_rtxt_path,
                options.base_allowlist_rtxt_path)

        bundle_config = _GenerateBundleConfigJson(
            options.uncompressed_assets, options.compress_shared_libraries,
            split_dimensions, base_master_resource_ids)

        tmp_bundle = os.path.join(tmp_dir, 'tmp_bundle')

        # When signing, bundletool writes the unsigned bundle to a side path
        # and jarsigner produces the final file at tmp_bundle.
        tmp_unsigned_bundle = tmp_bundle
        if options.keystore_path:
            tmp_unsigned_bundle = tmp_bundle + '.unsigned'

        # Important: bundletool requires that the bundle config file is
        # named with a .pb.json extension.
        tmp_bundle_config = tmp_bundle + '.BundleConfig.pb.json'

        with open(tmp_bundle_config, 'w') as f:
            f.write(bundle_config)

        cmd_args = build_utils.JavaCmd(options.warnings_as_errors) + [
            '-jar',
            bundletool.BUNDLETOOL_JAR_PATH,
            'build-bundle',
            '--modules=' + ','.join(module_zips),
            '--output=' + tmp_unsigned_bundle,
            '--config=' + tmp_bundle_config,
        ]

        build_utils.CheckOutput(
            cmd_args,
            print_stdout=True,
            print_stderr=True,
            stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings,
            fail_on_output=options.warnings_as_errors)

        if options.validate_services:
            # TODO(crbug.com/1126301): This step takes 0.4s locally for bundles with
            # isolated splits disabled and 2s for bundles with isolated splits
            # enabled.  Consider making this run in parallel or move into a separate
            # step before enabling isolated splits by default.
            _MaybeCheckServicesPresentInBase(tmp_unsigned_bundle, module_zips)

        if options.keystore_path:
            # NOTE: As stated by the public documentation, apksigner cannot be used
            # to sign the bundle (because it rejects anything that isn't an APK).
            # The signature and digest algorithm selection come from the internal
            # App Bundle documentation. There is no corresponding public doc :-(
            signing_cmd_args = [
                'jarsigner',
                '-sigalg',
                'SHA256withRSA',
                '-digestalg',
                'SHA-256',
                '-keystore',
                'file:' + options.keystore_path,
                '-storepass',
                options.keystore_password,
                '-signedjar',
                tmp_bundle,
                tmp_unsigned_bundle,
                options.key_name,
            ]
            build_utils.CheckOutput(signing_cmd_args,
                                    print_stderr=True,
                                    fail_on_output=options.warnings_as_errors)

        shutil.move(tmp_bundle, options.out_bundle)

    if options.rtxt_out_path:
        _ConcatTextFiles(options.rtxt_in_paths, options.rtxt_out_path)

    if options.pathmap_out_path:
        _WriteBundlePathmap(options.pathmap_in_paths, options.module_names,
                            options.pathmap_out_path)
コード例 #16
0
def main(args):
    """Entry point: shrinks/obfuscates the input jars with R8 or ProGuard.

    Args:
      args: Raw command-line arguments; @response-file references are
        expanded before parsing.
    """
    args = build_utils.ExpandFileArgs(args)
    options = _ParseOptions(args)

    # Classpath entries that are also inputs must not be repeated as library
    # jars, otherwise the optimizer would see the same classes twice.
    libraries = []
    for p in options.classpath:
        # If a jar is part of input no need to include it as library jar.
        if p not in libraries and p not in options.input_paths:
            libraries.append(p)

    # TODO(agrieve): Remove proguard usages.
    if options.r8_path:
        with build_utils.TempDir() as tmp_dir:
            tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
            tmp_proguard_config_path = os.path.join(tmp_dir,
                                                    'proguard_config.txt')
            # If there is no output (no classes are kept), this prevents this script
            # from failing.
            build_utils.Touch(tmp_mapping_path)

            # For a .dex output target, R8 writes into a temp directory and the
            # resulting dex file is then moved into place.
            if options.output_path.endswith('.dex'):
                with build_utils.TempDir() as tmp_dex_dir:
                    cmd = _CreateR8Command(options, tmp_mapping_path,
                                           tmp_dex_dir,
                                           tmp_proguard_config_path, libraries)
                    build_utils.CheckOutput(cmd)
                    _MoveTempDexFile(tmp_dex_dir, options.output_path)
            else:
                cmd = _CreateR8Command(options, tmp_mapping_path,
                                       options.output_path,
                                       tmp_proguard_config_path, libraries)
                build_utils.CheckOutput(cmd)

            # Copy output files to correct locations.
            with build_utils.AtomicOutput(options.mapping_output) as mapping:
                # Mapping files generated by R8 include comments that may break
                # some of our tooling so remove those.
                with open(tmp_mapping_path) as tmp:
                    mapping.writelines(l for l in tmp if not l.startswith("#"))

        # Merge all proguard configs into a single flags file for downstream
        # consumers / verification.
        with build_utils.AtomicOutput(options.output_config) as merged_config:
            proguard_util.WriteFlagsFile(options.proguard_configs,
                                         merged_config,
                                         exclude_generated=True)

        if options.expected_configs_file:
            _VerifyExpectedConfigs(options.expected_configs_file,
                                   options.output_config,
                                   options.verify_expected_configs)

        other_inputs = []
        if options.apply_mapping:
            other_inputs += options.apply_mapping

        build_utils.WriteDepfile(options.depfile,
                                 options.output_path,
                                 inputs=options.proguard_configs +
                                 options.input_paths + libraries +
                                 other_inputs,
                                 add_pydeps=False)
    else:
        # Legacy ProGuard path: assemble the command via ProguardCmdBuilder
        # and run it only when inputs are stale.
        proguard = proguard_util.ProguardCmdBuilder(options.proguard_path)
        proguard.injars(options.input_paths)
        proguard.configs(options.proguard_configs)
        proguard.config_exclusions(options.proguard_config_exclusions)
        proguard.outjar(options.output_path)
        proguard.mapping_output(options.mapping_output)
        proguard.libraryjars(libraries)
        proguard.verbose(options.verbose)
        # Do not consider the temp file as an input since its name is random.
        input_paths = proguard.GetInputs()

        with tempfile.NamedTemporaryFile() as f:
            if options.apply_mapping:
                input_paths.append(options.apply_mapping)
                # Maintain only class name mappings in the .mapping file in order to
                # work around what appears to be a ProGuard bug in -applymapping:
                #     method 'int close()' is not being kept as 'a', but remapped to 'c'
                _RemoveMethodMappings(options.apply_mapping, f)
                proguard.mapping(f.name)

            input_strings = proguard.build()
            # Replace the random temp-file name with a stable placeholder so
            # the staleness check is not invalidated on every run.
            if f.name in input_strings:
                input_strings[input_strings.index(f.name)] = '$M'

            build_utils.CallAndWriteDepfileIfStale(
                proguard.CheckOutput,
                options,
                input_paths=input_paths,
                input_strings=input_strings,
                output_paths=proguard.GetOutputs(),
                depfile_deps=proguard.GetDepfileDeps(),
                add_pydeps=False)
コード例 #17
0
def main(args):
    """Builds (and optionally signs) an Android App Bundle from module zips."""
    options = _ParseArgs(build_utils.ExpandFileArgs(args))

    split_dimensions = []
    if options.split_dimensions:
        split_dimensions = [dim.upper() for dim in options.split_dimensions]

    with build_utils.TempDir() as work_dir:
        module_zips = [
            _SplitModuleForAssetTargeting(module_zip, work_dir,
                                          split_dimensions)
            for module_zip in options.module_zips
        ]

        base_master_resource_ids = None
        if options.base_module_rtxt_path:
            base_master_resource_ids = _GenerateBaseResourcesWhitelist(
                options.base_module_rtxt_path,
                options.base_whitelist_rtxt_path)

        bundle_config = _GenerateBundleConfigJson(
            options.uncompressed_assets, options.compress_shared_libraries,
            split_dimensions, base_master_resource_ids)

        tmp_bundle = os.path.join(work_dir, 'tmp_bundle')

        # When signing, bundletool writes to a separate '.unsigned' path and
        # jarsigner produces the final bundle at tmp_bundle.
        unsigned_bundle = tmp_bundle
        if options.keystore_path:
            unsigned_bundle = tmp_bundle + '.unsigned'

        # Important: bundletool requires that the bundle config file is
        # named with a .pb.json extension.
        config_path = tmp_bundle + '.BundleConfig.pb.json'
        with open(config_path, 'w') as config_file:
            config_file.write(bundle_config)

        build_cmd = [
            'java',
            '-jar',
            bundletool.BUNDLETOOL_JAR_PATH,
            'build-bundle',
            '--modules=%s' % ','.join(module_zips),
            '--output=%s' % unsigned_bundle,
            '--config=%s' % config_path,
        ]
        build_utils.CheckOutput(build_cmd, print_stdout=True,
                                print_stderr=True)

        if options.keystore_path:
            # NOTE: As stated by the public documentation, apksigner cannot be used
            # to sign the bundle (because it rejects anything that isn't an APK).
            # The signature and digest algorithm selection come from the internal
            # App Bundle documentation. There is no corresponding public doc :-(
            sign_cmd = [
                'jarsigner',
                '-sigalg',
                'SHA256withRSA',
                '-digestalg',
                'SHA-256',
                '-keystore',
                'file:' + options.keystore_path,
                '-storepass',
                options.keystore_password,
                '-signedjar',
                tmp_bundle,
                unsigned_bundle,
                options.key_name,
            ]
            build_utils.CheckOutput(sign_cmd, print_stderr=True)

        shutil.move(tmp_bundle, options.out_bundle)

    if options.rtxt_out_path:
        # Concatenate the per-module R.txt files, each prefixed with a header
        # naming its source file.
        with open(options.rtxt_out_path, 'w') as rtxt_out:
            for rtxt_in_path in options.rtxt_in_paths:
                with open(rtxt_in_path, 'r') as rtxt_in:
                    rtxt_out.write('-- Contents of {}\n'.format(
                        os.path.basename(rtxt_in_path)))
                    rtxt_out.write(rtxt_in.read())
コード例 #18
0
def _OnStaleMd5(options):
    """Processes Android resources with aapt and writes the build outputs.

    Runs 'aapt package' to generate R.txt / R.java, optionally generates
    v14-compatible resource variants, crunches image resources, and writes
    the resource zip, srcjar (or R_dir) and R.txt outputs.
    """
    aapt = options.aapt_path
    with build_utils.TempDir() as temp_dir:
        deps_dir = os.path.join(temp_dir, 'deps')
        build_utils.MakeDirectory(deps_dir)
        v14_dir = os.path.join(temp_dir, 'v14')
        build_utils.MakeDirectory(v14_dir)

        gen_dir = os.path.join(temp_dir, 'gen')
        build_utils.MakeDirectory(gen_dir)
        r_txt_path = os.path.join(gen_dir, 'R.txt')
        srcjar_dir = os.path.join(temp_dir, 'java')

        input_resource_dirs = options.resource_dirs

        # Generate API-14 compatible resource variants unless explicitly
        # skipped.
        if not options.v14_skip:
            for resource_dir in input_resource_dirs:
                generate_v14_compatible_resources.GenerateV14Resources(
                    resource_dir, v14_dir)

        # Extract each dependency's resource zip into its own subdirectory;
        # zip basenames must be unique or extraction would clobber files.
        dep_zips = options.dependencies_res_zips
        dep_subdirs = []
        for z in dep_zips:
            subdir = os.path.join(deps_dir, os.path.basename(z))
            if os.path.exists(subdir):
                raise Exception('Resource zip name conflict: ' +
                                os.path.basename(z))
            build_utils.ExtractAll(z, path=subdir)
            dep_subdirs.append(subdir)

        # Generate R.java. This R.java contains non-final constants and is used only
        # while compiling the library jar (e.g. chromium_content.jar). When building
        # an apk, a new R.java file with the correct resource -> ID mappings will be
        # generated by merging the resources from all libraries and the main apk
        # project.
        package_command = [
            aapt,
            'package',
            '-m',
            '-M',
            options.android_manifest,
            '--auto-add-overlay',
            '--no-version-vectors',
            '-I',
            options.android_sdk_jar,
            '--output-text-symbols',
            gen_dir,
            '-J',
            gen_dir,  # Required for R.txt generation.
            '--ignore-assets',
            build_utils.AAPT_IGNORE_PATTERN
        ]

        # aapt supports only the "--include-all-resources" mode, where each R.java
        # file ends up with all symbols, rather than only those that it had at the
        # time it was originally generated. This subtle difference makes no
        # difference when compiling, but can lead to increased unused symbols in the
        # resulting R.class files.
        # TODO(agrieve): See if proguard makes this difference actually translate
        # into a size difference. If not, we can delete all of our custom R.java
        # template code above (and make include_all_resources the default).
        if options.include_all_resources:
            srcjar_dir = gen_dir
            if options.extra_res_packages:
                colon_separated = ':'.join(options.extra_res_packages)
                package_command += ['--extra-packages', colon_separated]
            if options.non_constant_id:
                package_command.append('--non-constant-id')
            if options.custom_package:
                package_command += ['--custom-package', options.custom_package]
            if options.shared_resources:
                package_command.append('--shared-lib')
            if options.app_as_shared_lib:
                package_command.append('--app-as-shared-lib')

        for d in input_resource_dirs:
            package_command += ['-S', d]

        # Adding all dependencies as sources is necessary for @type/foo references
        # to symbols within dependencies to resolve. However, it has the side-effect
        # that all Java symbols from dependencies are copied into the new R.java.
        # E.g.: It enables an arguably incorrect usage of
        # "mypackage.R.id.lib_symbol" where "libpackage.R.id.lib_symbol" would be
        # more correct. This is just how Android works.
        for d in dep_subdirs:
            package_command += ['-S', d]

        if options.proguard_file:
            package_command += ['-G', options.proguard_file]
        if options.proguard_file_main_dex:
            package_command += ['-D', options.proguard_file_main_dex]
        build_utils.CheckOutput(package_command, print_stderr=False)

        # When an empty res/ directory is passed, aapt does not write an R.txt.
        if not os.path.exists(r_txt_path):
            build_utils.Touch(r_txt_path)

        if not options.include_all_resources:
            # --include-all-resources can only be specified for generating final R
            # classes for APK. It makes no sense for APK to have pre-generated R.txt
            # though, because aapt-generated already lists all available resources.
            if options.r_text_in:
                r_txt_path = options.r_text_in

            packages = list(options.extra_res_packages)
            r_txt_files = list(options.extra_r_text_files)

            cur_package = options.custom_package
            if not options.custom_package:
                cur_package = _ExtractPackageFromManifest(
                    options.android_manifest)

            # Don't create a .java file for the current resource target when:
            # - no package name was provided (either by manifest or build rules),
            # - there was already a dependent android_resources() with the same
            #   package (occurs mostly when an apk target and resources target share
            #   an AndroidManifest.xml)
            if cur_package != 'org.dummy' and cur_package not in packages:
                packages.append(cur_package)
                r_txt_files.append(r_txt_path)

            if packages:
                shared_resources = options.shared_resources or options.app_as_shared_lib
                CreateRJavaFiles(srcjar_dir, r_txt_path, packages, r_txt_files,
                                 shared_resources)

        # This is the list of directories with resources to put in the final .zip
        # file. The order of these is important so that crunched/v14 resources
        # override the normal ones.
        zip_resource_dirs = input_resource_dirs + [v14_dir]

        base_crunch_dir = os.path.join(temp_dir, 'crunch')

        # Crunch image resources. This shrinks png files and is necessary for
        # 9-patch images to display correctly. 'aapt crunch' accepts only a single
        # directory at a time and deletes everything in the output directory.
        for idx, input_dir in enumerate(input_resource_dirs):
            crunch_dir = os.path.join(base_crunch_dir, str(idx))
            build_utils.MakeDirectory(crunch_dir)
            zip_resource_dirs.append(crunch_dir)
            CrunchDirectory(aapt, input_dir, crunch_dir)

        ZipResources(zip_resource_dirs, options.resource_zip_out)

        if options.all_resources_zip_out:
            CombineZips([options.resource_zip_out] + dep_zips,
                        options.all_resources_zip_out)

        # Either materialize the generated sources as a directory (R_dir) or
        # zip them into a srcjar.
        if options.R_dir:
            build_utils.DeleteDirectory(options.R_dir)
            shutil.copytree(srcjar_dir, options.R_dir)
        else:
            build_utils.ZipDir(options.srcjar_out, srcjar_dir)

        if options.r_text_out:
            shutil.copyfile(r_txt_path, options.r_text_out)
コード例 #19
0
def _OnStaleMd5(options, javac_cmd, java_files, classpath):
    """Compiles |java_files| with javac and packages the classes into a jar.

    Note: |java_files| is extended in place with sources extracted from
    srcjars.
    """
    logging.info('Starting _OnStaleMd5')

    # Compiles with Error Prone take twice as long to run as pure javac. Thus
    # GN rules run both in parallel, with Error Prone only used for checks,
    # so its outputs are discarded.
    keep_outputs = not options.enable_errorprone

    with build_utils.TempDir() as work_dir:
        class_out_dir = os.path.join(work_dir, 'classes')
        os.makedirs(class_out_dir)

        gen_src_dir = (options.generated_dir if keep_outputs
                       else os.path.join(work_dir, 'gen'))
        # Clear out stale generated files from a previous run.
        shutil.rmtree(gen_src_dir, True)

        srcjar_files = {}
        if options.java_srcjars:
            logging.info('Extracting srcjars to %s', gen_src_dir)
            build_utils.MakeDirectory(gen_src_dir)
            extracted_srcs = []
            for srcjar in options.java_srcjars:
                extracted = build_utils.ExtractAll(srcjar,
                                                   no_clobber=True,
                                                   path=gen_src_dir,
                                                   pattern='*.java')
                # Record the path inside the srcjar so the viewer can present
                # a tree structure.
                srcjar_files.update(
                    (p, '{}/{}'.format(srcjar,
                                       os.path.relpath(p, gen_src_dir)))
                    for p in extracted)
                extracted_srcs.extend(extracted)
            logging.info('Done extracting srcjars')
            java_files.extend(extracted_srcs)

        if java_files:
            # The classes directory lives in a temp dir, which would make the
            # md5-stamped base command unstable, so append it here instead.
            compile_cmd = javac_cmd + ['-d', class_out_dir]

            if classpath:
                compile_cmd += ['-classpath', ':'.join(classpath)]

            # Source paths go through a response file to avoid extremely long
            # command lines that are tedious to debug.
            rsp_path = os.path.join(work_dir, 'files_list.txt')
            with open(rsp_path, 'w') as rsp_file:
                rsp_file.write(' '.join(java_files))
            compile_cmd.append('@' + rsp_path)

            logging.debug('Build command %s', compile_cmd)
            build_utils.CheckOutput(compile_cmd,
                                    print_stdout=options.chromium_code,
                                    stderr_filter=ProcessJavacOutput)
            logging.info('Finished build command')

        jar_worker = None
        if keep_outputs:
            # Creating the jar file takes the longest; start it first on a
            # separate process to unblock the remaining post-processing steps.
            jar_worker = multiprocessing.Process(
                target=_CreateJarFile,
                args=(options.jar_path, options.provider_configurations,
                      options.additional_jar_files, class_out_dir))
            jar_worker.start()
            _CreateInfoFile(java_files, options.jar_path,
                            options.chromium_code, srcjar_files,
                            class_out_dir, gen_src_dir)
        else:
            # Check-only run: just touch the declared outputs.
            build_utils.Touch(options.jar_path)
            build_utils.Touch(options.jar_path + '.info')

        if jar_worker:
            jar_worker.join()
        logging.info('Completed all steps in _OnStaleMd5')
コード例 #20
0
def _RunResourceSizes(cmd_args):
    """Runs one resource_sizes.py invocation, echoing its output on failure.

    Args:
      cmd_args: Full argv list for the subprocess.

    Raises:
      subprocess.CalledProcessError: re-raised after printing the combined
        stdout/stderr of the failed command so the failure is visible in logs.
    """
    try:
        subprocess.check_output(cmd_args, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        print(e.output)
        raise


def main():
    """Diffs the sizes of two APKs by running resource_sizes.py on each.

    Returns:
      1 on chartjson->histogram conversion failure, otherwise None.
    """
    args, unknown_args = _CreateArgparser().parse_known_args()
    # TODO(bsheedy): Remove this once all uses of --chartjson are removed.
    if args.chartjson:
        args.output_format = 'chartjson'

    chartjson = _BASE_CHART.copy() if args.output_format else None

    with build_utils.TempDir() as base_dir, build_utils.TempDir() as diff_dir:
        # Run resource_sizes.py on the two APKs.
        resource_sizes_path = os.path.join(_ANDROID_DIR, 'resource_sizes.py')
        shared_args = (
            ['python', resource_sizes_path, '--output-format=chartjson'] +
            unknown_args)

        base_args = shared_args + ['--output-dir', base_dir, args.base_apk]
        if args.out_dir_base:
            base_args += ['--chromium-output-directory', args.out_dir_base]
        _RunResourceSizes(base_args)

        diff_args = shared_args + ['--output-dir', diff_dir, args.diff_apk]
        if args.out_dir_diff:
            diff_args += ['--chromium-output-directory', args.out_dir_diff]
        _RunResourceSizes(diff_args)

        # Combine the separate results.
        base_file = os.path.join(base_dir, _CHARTJSON_FILENAME)
        diff_file = os.path.join(diff_dir, _CHARTJSON_FILENAME)
        base_results = shared_preference_utils.ExtractSettingsFromJson(
            base_file)
        diff_results = shared_preference_utils.ExtractSettingsFromJson(
            diff_file)
        DiffResults(chartjson, base_results, diff_results)
        if args.include_intermediate_results:
            AddIntermediateResults(chartjson, base_results, diff_results)

        if args.output_format:
            chartjson_path = os.path.join(os.path.abspath(args.output_dir),
                                          _CHARTJSON_FILENAME)
            logging.critical('Dumping diff chartjson to %s', chartjson_path)
            with open(chartjson_path, 'w') as outfile:
                json.dump(chartjson, outfile)

            if args.output_format == 'histograms':
                histogram_result = convert_chart_json.ConvertChartJson(
                    chartjson_path)
                if histogram_result.returncode != 0:
                    logging.error('chartjson conversion failed with error: %s',
                                  histogram_result.stdout)
                    return 1

                histogram_path = os.path.join(os.path.abspath(args.output_dir),
                                              'perf_results.json')
                logging.critical('Dumping diff histograms to %s',
                                 histogram_path)
                with open(histogram_path, 'w') as json_file:
                    json_file.write(histogram_result.stdout)
コード例 #21
0
def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs,
                classpath):
    """Compiles Java sources, optionally incrementally via jmake.

    Args:
      changes: Change information from the md5 staleness check; used to
        decide whether an incremental compile is possible and which paths
        changed.
      options: Parsed command-line options.
      javac_cmd: Base compiler command line (rewritten for jmake when
        compiling incrementally).
      java_files: List of .java paths; extended in place with sources
        extracted from srcjars.
      classpath_inputs: Paths whose modification forces a full compile.
      classpath: Compile classpath; extended in place with the extracted
        classes dir in incremental mode.
    """
    logging.info('Starting _OnStaleMd5')
    # Don't bother enabling incremental compilation for non-chromium code.
    incremental = options.incremental and options.chromium_code

    # Compiles with Error Prone take twice as long to run as pure javac. Thus GN
    # rules run both in parallel, with Error Prone only used for checks.
    save_outputs = not options.enable_errorprone

    with build_utils.TempDir() as temp_dir:
        srcjars = options.java_srcjars

        classes_dir = os.path.join(temp_dir, 'classes')
        os.makedirs(classes_dir)

        changed_paths = None
        # jmake can handle deleted files, but it's a rare case and it would
        # complicate this script's logic.
        if incremental and changes.AddedOrModifiedOnly():
            changed_paths = set(changes.IterChangedPaths())
            # Do a full compile if classpath has changed.
            # jmake doesn't seem to do this on its own... Might be that ijars mess up
            # its change-detection logic.
            if any(p in changed_paths for p in classpath_inputs):
                changed_paths = None

        if options.incremental:
            # jmake's project database file lives next to the output jar.
            pdb_path = options.jar_path + '.pdb'

        if incremental:
            # jmake is a compiler wrapper that figures out the minimal set of .java
            # files that need to be rebuilt given a set of .java files that have
            # changed.
            # jmake determines what files are stale based on timestamps between .java
            # and .class files. Since we use .jars, .srcjars, and md5 checks,
            # timestamp info isn't accurate for this purpose. Rather than use jmake's
            # programatic interface (like we eventually should), we ensure that all
            # .class files are newer than their .java files, and convey to jmake which
            # sources are stale by having their .class files be missing entirely
            # (by not extracting them).
            javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)

        if save_outputs:
            generated_java_dir = options.generated_dir
        else:
            generated_java_dir = os.path.join(temp_dir, 'gen')

        # Incremental means not all files will be extracted, so don't bother
        # clearing out stale generated files.
        if not incremental:
            shutil.rmtree(generated_java_dir, True)

        srcjar_files = {}
        if srcjars:
            logging.info('Extracting srcjars to %s', generated_java_dir)
            build_utils.MakeDirectory(generated_java_dir)
            jar_srcs = []
            for srcjar in options.java_srcjars:
                if changed_paths:
                    changed_paths.update(
                        os.path.join(generated_java_dir, f)
                        for f in changes.IterChangedSubpaths(srcjar))
                extracted_files = build_utils.ExtractAll(
                    srcjar,
                    no_clobber=not incremental,
                    path=generated_java_dir,
                    pattern='*.java')
                for path in extracted_files:
                    # We want the path inside the srcjar so the viewer can have a tree
                    # structure.
                    srcjar_files[path] = '{}/{}'.format(
                        srcjar, os.path.relpath(path, generated_java_dir))
                jar_srcs.extend(extracted_files)
            logging.info('Done extracting srcjars')
            java_files.extend(jar_srcs)
            if changed_paths:
                # Set the mtime of all sources to 0 since we use the absence of .class
                # files to tell jmake which files are stale.
                for path in jar_srcs:
                    os.utime(path, (0, 0))

        if java_files:
            if changed_paths:
                changed_java_files = [
                    p for p in java_files if p in changed_paths
                ]
                if os.path.exists(options.jar_path):
                    _ExtractClassFiles(options.jar_path, classes_dir,
                                       changed_java_files)
                # Add the extracted files to the classpath. This is required because
                # when compiling only a subset of files, classes that haven't changed
                # need to be findable.
                classpath.append(classes_dir)

            # Can happen when a target goes from having no sources, to having sources.
            # It's created by the call to build_utils.Touch() below.
            if incremental:
                if os.path.exists(pdb_path) and not os.path.getsize(pdb_path):
                    os.unlink(pdb_path)

            # Don't include the output directory in the initial set of args since it
            # being in a temp dir makes it unstable (breaks md5 stamping).
            cmd = javac_cmd + ['-d', classes_dir]

            # Pass classpath and source paths as response files to avoid extremely
            # long command lines that are tedius to debug.
            if classpath:
                cmd += ['-classpath', ':'.join(classpath)]

            java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
            with open(java_files_rsp_path, 'w') as f:
                f.write(' '.join(java_files))
            cmd += ['@' + java_files_rsp_path]

            # JMake prints out some diagnostic logs that we want to ignore.
            # This assumes that all compiler output goes through stderr.
            stdout_filter = lambda s: ''
            if md5_check.PRINT_EXPLANATIONS:
                stdout_filter = None

            logging.debug('Build command %s', cmd)
            attempt_build = lambda: build_utils.CheckOutput(
                cmd,
                print_stdout=options.chromium_code,
                stdout_filter=stdout_filter,
                stderr_filter=ProcessJavacOutput)
            try:
                attempt_build()
            except build_utils.CalledProcessError as e:
                # Work-around for a bug in jmake (http://crbug.com/551449).
                # Deleting the project database forces a clean rebuild on retry.
                if ('project database corrupted' not in e.output
                        and 'jmake: internal Java exception' not in e.output):
                    raise
                logging.error(
                    'Applying work-around for jmake project database corrupted '
                    '(http://crbug.com/551449).')
                os.unlink(pdb_path)
                attempt_build()
            logging.info('Finished build command')

        if options.incremental or save_outputs:
            # Creating the jar file takes the longest, start it first on a separate
            # process to unblock the rest of the post-processing steps.
            jar_file_worker = multiprocessing.Process(
                target=_CreateJarFile,
                args=(options.jar_path, options.provider_configurations,
                      options.additional_jar_files, classes_dir))
            jar_file_worker.start()
        else:
            jar_file_worker = None
            build_utils.Touch(options.jar_path)

        if save_outputs:
            _CreateInfoFile(java_files, options.jar_path,
                            options.chromium_code, srcjar_files, classes_dir,
                            generated_java_dir)
        else:
            build_utils.Touch(options.jar_path + '.info')

        if options.incremental and (not java_files or not incremental):
            # Make sure output exists.
            build_utils.Touch(pdb_path)

        if jar_file_worker:
            jar_file_worker.join()
        logging.info('Completed all steps in _OnStaleMd5')
コード例 #22
0
def _OnStaleMd5(lint_path,
                config_path,
                processed_config_path,
                manifest_path,
                result_path,
                product_dir,
                sources,
                jar_path,
                cache_dir,
                android_sdk_version,
                srcjars,
                min_sdk_version,
                manifest_package,
                resource_sources,
                disable=None,
                classpath=None,
                can_fail_build=False,
                include_unexpected=False,
                silent=False):
  """Runs Android lint over the given sources/resources and reports issues.

  Stages sources, resources and the manifest into a temporary "project"
  directory (lint wants a project layout), invokes the lint binary, then
  parses the XML result file and prints any issues to stderr.

  Raises:
    Exception: if lint reported issues and can_fail_build is True.
  """

  def _RebasePath(path):
    """Returns relative path to top-level src dir.

    Args:
      path: A path relative to cwd.
    """
    ret = os.path.relpath(os.path.abspath(path), build_utils.DIR_SOURCE_ROOT)
    # If it's outside of src/, just use abspath.
    if ret.startswith('..'):
      ret = os.path.abspath(path)
    return ret

  def _ProcessConfigFile():
    """Copies config_path to processed_config_path, expanding PRODUCT_DIR."""
    if not config_path or not processed_config_path:
      return
    if not build_utils.IsTimeStale(processed_config_path, [config_path]):
      return

    # NOTE(review): the file is opened in binary mode but .replace() is
    # called with str arguments -- py2-era code; confirm under python3.
    with open(config_path, 'rb') as f:
      content = f.read().replace(
          'PRODUCT_DIR', _RebasePath(product_dir))

    with open(processed_config_path, 'wb') as f:
      f.write(content)

  def _ProcessResultFile():
    """Rewrites the product dir back to 'PRODUCT_DIR' for stable output."""
    with open(result_path, 'rb') as f:
      content = f.read().replace(
          _RebasePath(product_dir), 'PRODUCT_DIR')

    with open(result_path, 'wb') as f:
      f.write(content)

  def _ParseAndShowResultFile():
    """Prints each issue from the XML result file; returns the issue count."""
    dom = minidom.parse(result_path)
    issues = dom.getElementsByTagName('issue')
    if not silent:
      print(file=sys.stderr)
      for issue in issues:
        issue_id = issue.attributes['id'].value
        message = issue.attributes['message'].value
        location_elem = issue.getElementsByTagName('location')[0]
        path = location_elem.attributes['file'].value
        line = location_elem.getAttribute('line')
        if line:
          error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
        else:
          # Issues in class files don't have a line number.
          error = '%s %s: %s [warning]' % (path, message, issue_id)
        # NOTE(review): printing .encode() output yields a bytes repr under
        # python3 -- py2-era code; confirm intended behavior.
        print(error.encode('utf-8'), file=sys.stderr)
        for attr in ['errorLine1', 'errorLine2']:
          error_line = issue.getAttribute(attr)
          if error_line:
            print(error_line.encode('utf-8'), file=sys.stderr)
    return len(issues)

  with build_utils.TempDir() as temp_dir:
    _ProcessConfigFile()

    cmd = [
        _RebasePath(lint_path), '-Werror', '--exitcode', '--showall',
        '--xml', _RebasePath(result_path),
    ]
    if jar_path:
      # --classpath is just for .class files for this one target.
      cmd.extend(['--classpath', _RebasePath(jar_path)])
    if processed_config_path:
      cmd.extend(['--config', _RebasePath(processed_config_path)])

    tmp_dir_counter = [0]
    def _NewTempSubdir(prefix, append_digit=True):
      # Helper function to create a new sub directory based on the number of
      # subdirs created earlier.
      if append_digit:
        tmp_dir_counter[0] += 1
        prefix += str(tmp_dir_counter[0])
      new_dir = os.path.join(temp_dir, prefix)
      os.makedirs(new_dir)
      return new_dir

    resource_dirs = []
    for resource_source in resource_sources:
      if os.path.isdir(resource_source):
        resource_dirs.append(resource_source)
      else:
        # This is a zip file with generated resources (e. g. strings from GRD).
        # Extract it to temporary folder.
        resource_dir = _NewTempSubdir(resource_source, append_digit=False)
        resource_dirs.append(resource_dir)
        build_utils.ExtractAll(resource_source, path=resource_dir)

    for resource_dir in resource_dirs:
      cmd.extend(['--resources', _RebasePath(resource_dir)])

    if classpath:
      # --libraries is the classpath (excluding active target).
      cp = ':'.join(_RebasePath(p) for p in classpath)
      cmd.extend(['--libraries', cp])

    # There may be multiple source files with the same basename (but in
    # different directories). It is difficult to determine what part of the path
    # corresponds to the java package, and so instead just link the source files
    # into temporary directories (creating a new one whenever there is a name
    # conflict).
    def PathInDir(d, src):
      """Returns src's rebased path under d, creating parent dirs as needed."""
      subpath = os.path.join(d, _RebasePath(src))
      subdir = os.path.dirname(subpath)
      if not os.path.exists(subdir):
        os.makedirs(subdir)
      return subpath

    src_dirs = []
    for src in sources:
      src_dir = None
      for d in src_dirs:
        if not os.path.exists(PathInDir(d, src)):
          src_dir = d
          break
      if not src_dir:
        src_dir = _NewTempSubdir('SRC_ROOT')
        src_dirs.append(src_dir)
        cmd.extend(['--sources', _RebasePath(src_dir)])
      # In cases where the build dir is outside of the src dir, this can
      # result in trying to symlink a file to itself for this file:
      # gen/components/version_info/android/java/org/chromium/
      #   components/version_info/VersionConstants.java
      src = os.path.abspath(src)
      dst = PathInDir(src_dir, src)
      if src == dst:
        continue
      os.symlink(src, dst)

    if srcjars:
      srcjar_paths = build_utils.ParseGnList(srcjars)
      if srcjar_paths:
        srcjar_dir = _NewTempSubdir('SRC_ROOT')
        cmd.extend(['--sources', _RebasePath(srcjar_dir)])
        for srcjar in srcjar_paths:
          build_utils.ExtractAll(srcjar, path=srcjar_dir)

    if disable:
      cmd.extend(['--disable', ','.join(disable)])

    project_dir = _NewTempSubdir('SRC_ROOT')
    if android_sdk_version:
      # Create dummy project.properies file in a temporary "project" directory.
      # It is the only way to add Android SDK to the Lint's classpath. Proper
      # classpath is necessary for most source-level checks.
      with open(os.path.join(project_dir, 'project.properties'), 'w') \
          as propfile:
        print('target=android-{}'.format(android_sdk_version), file=propfile)

    # Put the manifest in a temporary directory in order to avoid lint detecting
    # sibling res/ and src/ directories (which should be pass explicitly if they
    # are to be included).
    if not manifest_path:
      manifest_path = os.path.join(
          build_utils.DIR_SOURCE_ROOT, 'build', 'android',
          'AndroidManifest.xml')
    lint_manifest_path = os.path.join(project_dir, 'AndroidManifest.xml')
    shutil.copyfile(os.path.abspath(manifest_path), lint_manifest_path)

    # Check that minSdkVersion and package is correct and add it to the manifest
    # in case it does not exist.
    doc, manifest, _ = manifest_utils.ParseManifest(lint_manifest_path)
    manifest_utils.AssertUsesSdk(manifest, min_sdk_version)
    manifest_utils.AssertPackage(manifest, manifest_package)
    uses_sdk = manifest.find('./uses-sdk')
    if uses_sdk is None:
      uses_sdk = ElementTree.Element('uses-sdk')
      manifest.insert(0, uses_sdk)
    uses_sdk.set('{%s}minSdkVersion' % manifest_utils.ANDROID_NAMESPACE,
                 min_sdk_version)
    if manifest_package:
      manifest.set('package', manifest_package)
    manifest_utils.SaveManifest(doc, lint_manifest_path)

    cmd.append(project_dir)

    if os.path.exists(result_path):
      os.remove(result_path)

    env = os.environ.copy()
    stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings
    if cache_dir:
      env['_JAVA_OPTIONS'] = '-Duser.home=%s' % _RebasePath(cache_dir)
      # When _JAVA_OPTIONS is set, java prints to stderr:
      # Picked up _JAVA_OPTIONS: ...
      #
      # We drop all lines that contain _JAVA_OPTIONS from the output
      stderr_filter = lambda l: re.sub(
          r'.*_JAVA_OPTIONS.*\n?',
          '',
          build_utils.FilterReflectiveAccessJavaWarnings(l))

    def fail_func(returncode, stderr):
      """Treats unexpected lint crashes as failures when requested."""
      if returncode != 0:
        return True
      if (include_unexpected and
          'Unexpected failure during lint analysis' in stderr):
        return True
      return False

    try:
      env['JAVA_HOME'] = os.path.relpath(build_utils.JAVA_HOME,
                                         build_utils.DIR_SOURCE_ROOT)
      build_utils.CheckOutput(cmd, cwd=build_utils.DIR_SOURCE_ROOT,
                              env=env or None, stderr_filter=stderr_filter,
                              fail_func=fail_func)
    except build_utils.CalledProcessError:
      # There is a problem with lint usage
      if not os.path.exists(result_path):
        raise

      # Sometimes produces empty (almost) files:
      if os.path.getsize(result_path) < 10:
        if can_fail_build:
          raise
        elif not silent:
          traceback.print_exc()
        return

      # There are actual lint issues
      try:
        num_issues = _ParseAndShowResultFile()
      except Exception: # pylint: disable=broad-except
        if not silent:
          print('Lint created unparseable xml file...')
          print('File contents:')
          with open(result_path) as f:
            print(f.read())
          if can_fail_build:
            traceback.print_exc()
        if can_fail_build:
          raise
        else:
          return

      _ProcessResultFile()
      if num_issues == 0 and include_unexpected:
        msg = 'Please refer to output above for unexpected lint failures.\n'
      else:
        msg = ('\nLint found %d new issues.\n'
               ' - For full explanation, please refer to %s\n'
               ' - For more information about lint and how to fix lint issues,'
               ' please refer to %s\n' %
               (num_issues, _RebasePath(result_path), _LINT_MD_URL))
      if not silent:
        print(msg, file=sys.stderr)
      if can_fail_build:
        raise Exception('Lint failed.')
コード例 #23
0
def _CreateNormalizedManifest(options):
    """Returns the normalized text of the fixed-up AndroidManifest.xml."""
    with build_utils.TempDir() as work_dir:
        manifest_path, _ = _FixManifest(options, work_dir)
        with open(manifest_path) as manifest_file:
            contents = manifest_file.read()
        return manifest_utils.NormalizeManifest(contents)
コード例 #24
0
ファイル: proguard.py プロジェクト: yuming2x/chromium
def _OptimizeWithR8(options,
                    config_paths,
                    libraries,
                    dynamic_config_data,
                    print_stdout=False):
    """Invokes R8 on the input jars and writes the output plus mapping file.

    The output is either a zipped jar or a directory of .dex files depending
    on the extension of options.output_path. The R8-generated mapping file is
    copied to options.mapping_output with comment lines stripped.
    """
    with build_utils.TempDir() as tmp_dir:
        # Write any dynamically generated proguard rules to disk so R8 can
        # consume them alongside the static config files.
        if dynamic_config_data:
            tmp_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
            with open(tmp_config_path, 'w') as config_file:
                config_file.write(dynamic_config_data)
            config_paths = config_paths + [tmp_config_path]

        tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
        # If there is no output (no classes are kept), this prevents this
        # script from failing.
        build_utils.Touch(tmp_mapping_path)

        # A '.dex' output path means R8 emits loose .dex files into a
        # directory; anything else is treated as a zipped jar output.
        output_is_zipped = not options.output_path.endswith('.dex')
        tmp_output = os.path.join(tmp_dir, 'r8out')
        if output_is_zipped:
            tmp_output += '.jar'
        else:
            os.mkdir(tmp_output)

        cmd = [
            build_utils.JAVA_PATH,
            '-jar',
            options.r8_path,
            '--no-desugaring',
            '--no-data-resources',
            '--output',
            tmp_output,
            '--pg-map-output',
            tmp_mapping_path,
        ]

        for library in libraries:
            cmd.extend(['--lib', library])

        for config_path in config_paths:
            cmd.extend(['--pg-conf', config_path])

        if options.min_api:
            cmd.extend(['--min-api', options.min_api])

        if options.main_dex_rules_path:
            for rules_file in options.main_dex_rules_path:
                cmd.extend(['--main-dex-rules', rules_file])

        cmd.extend(options.input_paths)

        env = os.environ.copy()
        stderr_filter = None
        if options.disable_outlining:
            # Setting _JAVA_OPTIONS makes java echo
            # "Picked up _JAVA_OPTIONS: ..." to stderr; drop those lines.
            env['_JAVA_OPTIONS'] = '-Dcom.android.tools.r8.disableOutlining=1'
            stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)

        build_utils.CheckOutput(cmd,
                                env=env,
                                print_stdout=print_stdout,
                                stderr_filter=stderr_filter)

        if not output_is_zipped:
            # Expect exactly one dex file in the staging directory.
            found_files = os.listdir(tmp_output)
            if len(found_files) > 1:
                raise Exception(
                    'Too many files created: {}'.format(found_files))
            tmp_output = os.path.join(tmp_output, found_files[0])

        # Copy output files to correct locations.
        shutil.move(tmp_output, options.output_path)

        with open(options.mapping_output, 'w') as out_file, \
            open(tmp_mapping_path) as in_file:
            # Mapping files generated by R8 include comments that may break
            # some of our tooling so remove those (specifically: apkanalyzer).
            out_file.writelines(
                line for line in in_file if not line.startswith('#'))
コード例 #25
0
def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs,
                runtime_classpath):
  """Compiles Java sources (optionally incrementally via jmake) into jars.

  Extracts .java files from srcjars, invokes javac (or jmake when
  options.incremental is set), then packages the resulting .class files into
  options.jar_path plus an '.excluded.jar' for classes matching
  options.jar_excluded_classes.
  """
  with build_utils.TempDir() as temp_dir:
    srcjars = options.java_srcjars
    # The .excluded.jar contains .class files excluded from the main jar.
    # It is used for incremental compiles.
    excluded_jar_path = options.jar_path.replace('.jar', '.excluded.jar')

    classes_dir = os.path.join(temp_dir, 'classes')
    os.makedirs(classes_dir)

    changed_paths = None
    # jmake can handle deleted files, but it's a rare case and it would
    # complicate this script's logic.
    if options.incremental and changes.AddedOrModifiedOnly():
      changed_paths = set(changes.IterChangedPaths())
      # Do a full compile if classpath has changed.
      # jmake doesn't seem to do this on its own... Might be that ijars mess up
      # its change-detection logic.
      if any(p in changed_paths for p in classpath_inputs):
        changed_paths = None

    if options.incremental:
      # jmake is a compiler wrapper that figures out the minimal set of .java
      # files that need to be rebuilt given a set of .java files that have
      # changed.
      # jmake determines what files are stale based on timestamps between .java
      # and .class files. Since we use .jars, .srcjars, and md5 checks,
      # timestamp info isn't accurate for this purpose. Rather than use jmake's
      # programmatic interface (like we eventually should), we ensure that all
      # .class files are newer than their .java files, and convey to jmake which
      # sources are stale by having their .class files be missing entirely
      # (by not extracting them).
      pdb_path = options.jar_path + '.pdb'
      javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)
      if srcjars:
        _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir)

    if srcjars:
      java_dir = os.path.join(temp_dir, 'java')
      os.makedirs(java_dir)
      for srcjar in options.java_srcjars:
        if changed_paths:
          changed_paths.update(os.path.join(java_dir, f)
                               for f in changes.IterChangedSubpaths(srcjar))
        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
      jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
      jar_srcs = _FilterJavaFiles(jar_srcs, options.javac_includes)
      java_files.extend(jar_srcs)
      if changed_paths:
        # Set the mtime of all sources to 0 since we use the absence of .class
        # files to tell jmake which files are stale.
        for path in jar_srcs:
          os.utime(path, (0, 0))

    if java_files:
      if changed_paths:
        changed_java_files = [p for p in java_files if p in changed_paths]
        if os.path.exists(options.jar_path):
          _ExtractClassFiles(options.jar_path, classes_dir, changed_java_files)
        if os.path.exists(excluded_jar_path):
          _ExtractClassFiles(excluded_jar_path, classes_dir, changed_java_files)
        # Add the extracted files to the classpath. This is required because
        # when compiling only a subset of files, classes that haven't changed
        # need to be findable.
        classpath_idx = javac_cmd.index('-classpath')
        javac_cmd[classpath_idx + 1] += ':' + classes_dir

      # Don't include the output directory in the initial set of args since it
      # being in a temp dir makes it unstable (breaks md5 stamping).
      cmd = javac_cmd + ['-d', classes_dir] + java_files

      # JMake prints out some diagnostic logs that we want to ignore.
      # This assumes that all compiler output goes through stderr.
      stdout_filter = lambda s: ''
      if md5_check.PRINT_EXPLANATIONS:
        stdout_filter = None

      build_utils.CheckOutput(
          cmd,
          print_stdout=options.chromium_code,
          stdout_filter=stdout_filter,
          stderr_filter=ColorJavacOutput)

    if options.main_class or options.manifest_entry:
      entries = []
      if options.manifest_entry:
        entries = [e.split(':') for e in options.manifest_entry]
      manifest_file = os.path.join(temp_dir, 'manifest')
      _CreateManifest(manifest_file, runtime_classpath, options.main_class,
                      entries)
    else:
      manifest_file = None

    # Split classes between the main jar and the excluded jar based on
    # options.jar_excluded_classes globs.
    glob = options.jar_excluded_classes
    inclusion_predicate = lambda f: not build_utils.MatchesGlob(f, glob)
    exclusion_predicate = lambda f: not inclusion_predicate(f)

    jar.JarDirectory(classes_dir,
                     options.jar_path,
                     manifest_file=manifest_file,
                     predicate=inclusion_predicate)
    jar.JarDirectory(classes_dir,
                     excluded_jar_path,
                     predicate=exclusion_predicate)
コード例 #26
0
def main(args):
    """Builds (and optionally signs) an Android App Bundle from module zips."""
    options = _ParseArgs(build_utils.ExpandFileArgs(args))

    # TODO(crbug.com/846633): Enable language-based configuration splits once
    # Chromium detects the appropriate fallback locales when needed.
    # split_dimensions = [ 'LANGUAGE' ]
    split_dimensions = []

    bundle_config = _GenerateBundleConfigJson(
        options.uncompressed_assets, options.uncompress_shared_libraries,
        split_dimensions)
    with build_utils.TempDir() as tmp_dir:
        module_zips = []
        for module in options.module_zips:
            module_zips.append(
                _SplitModuleForAssetTargeting(module, tmp_dir,
                                              split_dimensions))

        tmp_bundle = os.path.join(tmp_dir, 'tmp_bundle')

        # When signing, bundletool writes to a separate path which jarsigner
        # then signs into the final tmp_bundle location.
        if options.keystore_path:
            tmp_unsigned_bundle = tmp_bundle + '.unsigned'
        else:
            tmp_unsigned_bundle = tmp_bundle

        # Important: bundletool requires that the bundle config file is
        # named with a .pb.json extension.
        tmp_bundle_config = tmp_bundle + '.BundleConfig.pb.json'
        with open(tmp_bundle_config, 'w') as config_file:
            config_file.write(bundle_config)

        cmd_args = [
            'java', '-jar', bundletool.BUNDLETOOL_JAR_PATH, 'build-bundle',
            '--modules=%s' % ','.join(module_zips),
            '--output=%s' % tmp_unsigned_bundle,
            '--config=%s' % tmp_bundle_config,
        ]
        build_utils.CheckOutput(cmd_args, print_stdout=True, print_stderr=True)

        if options.keystore_path:
            # NOTE: As stated by the public documentation, apksigner cannot be used
            # to sign the bundle (because it rejects anything that isn't an APK).
            # The signature and digest algorithm selection come from the internal
            # App Bundle documentation. There is no corresponding public doc :-(
            signing_cmd_args = [
                'jarsigner',
                '-sigalg', 'SHA256withRSA',
                '-digestalg', 'SHA-256',
                '-keystore', 'file:' + options.keystore_path,
                '-storepass', options.keystore_password,
                '-signedjar', tmp_bundle,
                tmp_unsigned_bundle,
                options.key_name,
            ]
            build_utils.CheckOutput(signing_cmd_args, print_stderr=True)

        shutil.move(tmp_bundle, options.out_bundle)
コード例 #27
0
def _OptimizeWithR8(options,
                    config_paths,
                    libraries,
                    dynamic_config_data,
                    print_stdout=False):
    """Runs R8 (and L8 for desugared JDK libs) over the input jars.

    Produces per-feature-split dex outputs (one _SplitContext per split, with
    a synthetic 'base' split when no features are configured) and writes a
    cleaned-up proguard mapping file to options.mapping_output.

    Returns:
      The _SplitContext for the base split.

    Raises:
      build_utils.CalledProcessError: if R8 fails (with a debugging link
        appended to the error output).
    """
    with build_utils.TempDir() as tmp_dir:
        # Materialize dynamically generated proguard rules so R8 (and later
        # L8) can read them from disk.
        if dynamic_config_data:
            dynamic_config_path = os.path.join(tmp_dir, 'dynamic_config.flags')
            with open(dynamic_config_path, 'w') as f:
                f.write(dynamic_config_data)
            config_paths = config_paths + [dynamic_config_path]

        tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
        # If there is no output (no classes are kept), this prevents this script
        # from failing.
        build_utils.Touch(tmp_mapping_path)

        tmp_output = os.path.join(tmp_dir, 'r8out')
        os.mkdir(tmp_output)

        # Build one _SplitContext per feature split; every non-base split
        # defaults to 'base' as its parent unless options.uses_split says
        # otherwise.
        split_contexts_by_name = {}
        if options.feature_names:
            for name, dest_dex, input_jars in zip(options.feature_names,
                                                  options.dex_dests,
                                                  options.feature_jars):
                parent_name = options.uses_split.get(name)
                if parent_name is None and name != 'base':
                    parent_name = 'base'
                split_context = _SplitContext(name,
                                              dest_dex,
                                              input_jars,
                                              tmp_output,
                                              parent_name=parent_name)
                split_contexts_by_name[name] = split_context
        else:
            # Base context will get populated via "extra_jars" below.
            split_contexts_by_name['base'] = _SplitContext(
                'base', options.output_path, [], tmp_output)
        base_context = split_contexts_by_name['base']

        cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
            '-Dcom.android.tools.r8.allowTestProguardOptions=1',
            '-Dcom.android.tools.r8.verticalClassMerging=1',
        ]
        if options.disable_outlining:
            cmd += ['-Dcom.android.tools.r8.disableOutlining=1']
        if options.dump_inputs:
            cmd += ['-Dcom.android.tools.r8.dumpinputtofile=r8inputs.zip']
        cmd += [
            '-cp',
            options.r8_path,
            'com.android.tools.r8.R8',
            '--no-data-resources',
            '--output',
            base_context.staging_dir,
            '--pg-map-output',
            tmp_mapping_path,
        ]

        if options.disable_checks:
            # Info level priority logs are not printed by default.
            cmd += [
                '--map-diagnostics:CheckDiscardDiagnostic', 'error', 'info'
            ]

        if options.desugar_jdk_libs_json:
            cmd += [
                '--desugared-lib',
                options.desugar_jdk_libs_json,
                '--desugared-lib-pg-conf-output',
                options.desugared_library_keep_rule_output,
            ]

        if options.min_api:
            cmd += ['--min-api', options.min_api]

        if options.force_enable_assertions:
            cmd += ['--force-enable-assertions']

        for lib in libraries:
            cmd += ['--lib', lib]

        for config_file in config_paths:
            cmd += ['--pg-conf', config_file]

        if options.main_dex_rules_path:
            for main_dex_rule in options.main_dex_rules_path:
                cmd += ['--main-dex-rules', main_dex_rule]

        _DeDupeInputJars(split_contexts_by_name)

        # Add any extra inputs to the base context (e.g. desugar runtime).
        extra_jars = set(options.input_paths)
        for split_context in split_contexts_by_name.values():
            extra_jars -= split_context.input_jars
        base_context.input_jars.update(extra_jars)

        # Non-base splits are passed via --feature; base jars go as plain
        # positional inputs. sorted() keeps the command line deterministic.
        for split_context in split_contexts_by_name.values():
            if split_context is base_context:
                continue
            for in_jar in sorted(split_context.input_jars):
                cmd += ['--feature', in_jar, split_context.staging_dir]

        cmd += sorted(base_context.input_jars)

        try:
            stderr_filter = dex.CreateStderrFilter(
                options.show_desugar_default_interface_warnings)
            logging.debug('Running R8')
            build_utils.CheckOutput(cmd,
                                    print_stdout=print_stdout,
                                    stderr_filter=stderr_filter,
                                    fail_on_output=options.warnings_as_errors)
        except build_utils.CalledProcessError as err:
            # Re-raise with a pointer to the debugging docs appended.
            debugging_link = ('\n\nR8 failed. Please see {}.'.format(
                'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
                'android/docs/java_optimization.md#Debugging-common-failures\n'
            ))
            raise build_utils.CalledProcessError(err.cwd, err.args,
                                                 err.output + debugging_link)

        base_has_imported_lib = False
        if options.desugar_jdk_libs_json:
            logging.debug('Running L8')
            existing_files = build_utils.FindInDirectory(
                base_context.staging_dir)
            # The L8 dex must be numbered after the dex files R8 emitted.
            jdk_dex_output = os.path.join(
                base_context.staging_dir,
                'classes%d.dex' % (len(existing_files) + 1))
            # Use -applymapping to avoid name collisions.
            l8_dynamic_config_path = os.path.join(tmp_dir,
                                                  'l8_dynamic_config.flags')
            with open(l8_dynamic_config_path, 'w') as f:
                f.write("-applymapping '{}'\n".format(tmp_mapping_path))
            # Pass the dynamic config so that obfuscation options are picked up.
            l8_config_paths = [dynamic_config_path, l8_dynamic_config_path]
            if os.path.exists(options.desugared_library_keep_rule_output):
                l8_config_paths.append(
                    options.desugared_library_keep_rule_output)

            base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
                options.r8_path, options.min_api,
                options.desugar_jdk_libs_json, options.desugar_jdk_libs_jar,
                options.desugar_jdk_libs_configuration_jar, jdk_dex_output,
                options.warnings_as_errors, l8_config_paths)
            if int(options.min_api) >= 24 and base_has_imported_lib:
                # Dump the offending desugared signatures before asserting so
                # the failure is actionable.
                with open(jdk_dex_output, 'rb') as f:
                    dexfile = dex_parser.DexFile(bytearray(f.read()))
                    for m in dexfile.IterMethodSignatureParts():
                        print('{}#{}'.format(m[0], m[2]))
                assert False, (
                    'Desugared JDK libs are disabled on Monochrome and newer - see '
                    'crbug.com/1159984 for details, and see above list for desugared '
                    'classes and methods.')

        logging.debug('Collecting ouputs')
        base_context.CreateOutput(base_has_imported_lib,
                                  options.desugared_library_keep_rule_output)
        for split_context in split_contexts_by_name.values():
            if split_context is not base_context:
                split_context.CreateOutput()

        with open(options.mapping_output, 'w') as out_file, \
            open(tmp_mapping_path) as in_file:
            # Mapping files generated by R8 include comments that may break
            # some of our tooling so remove those (specifically: apkanalyzer).
            out_file.writelines(l for l in in_file if not l.startswith('#'))
    return base_context
コード例 #28
0
ファイル: process_resources.py プロジェクト: dalecurtis/mojo
def main():
  """Processes Android resources: generates R.java, crunches, and zips.

  Runs 'aapt package' to produce R.java (and optionally proguard rules),
  'aapt crunch' over each input resource dir, then zips the resource dirs
  and R.java sources to the configured output paths.
  """
  args = build_utils.ExpandFileArgs(sys.argv[1:])

  options = ParseArgs(args)
  android_jar = os.path.join(options.android_sdk, 'android.jar')
  aapt = os.path.join(options.android_sdk_tools, 'aapt')

  input_files = []

  with build_utils.TempDir() as temp_dir:
    deps_dir = os.path.join(temp_dir, 'deps')
    build_utils.MakeDirectory(deps_dir)
    v14_dir = os.path.join(temp_dir, 'v14')
    build_utils.MakeDirectory(v14_dir)

    gen_dir = os.path.join(temp_dir, 'gen')
    build_utils.MakeDirectory(gen_dir)

    input_resource_dirs = build_utils.ParseGypList(options.resource_dirs)

    for resource_dir in input_resource_dirs:
      generate_v14_compatible_resources.GenerateV14Resources(
          resource_dir,
          v14_dir,
          options.v14_verify_only)

    dep_zips = build_utils.ParseGypList(options.dependencies_res_zips)
    input_files += dep_zips
    dep_subdirs = []
    for z in dep_zips:
      subdir = os.path.join(deps_dir, os.path.basename(z))
      if os.path.exists(subdir):
        raise Exception('Resource zip name conflict: ' + os.path.basename(z))
      build_utils.ExtractAll(z, path=subdir)
      dep_subdirs.append(subdir)

    # Generate R.java. This R.java contains non-final constants and is used only
    # while compiling the library jar (e.g. chromium_content.jar). When building
    # an apk, a new R.java file with the correct resource -> ID mappings will be
    # generated by merging the resources from all libraries and the main apk
    # project.
    package_command = [aapt,
                       'package',
                       '-m',
                       '-M', options.android_manifest,
                       '--auto-add-overlay',
                       '-I', android_jar,
                       '--output-text-symbols', gen_dir,
                       '-J', gen_dir]

    for d in input_resource_dirs:
      package_command += ['-S', d]

    for d in dep_subdirs:
      package_command += ['-S', d]

    if options.non_constant_id:
      package_command.append('--non-constant-id')
    if options.custom_package:
      package_command += ['--custom-package', options.custom_package]
    if options.proguard_file:
      package_command += ['-G', options.proguard_file]
    build_utils.CheckOutput(package_command, print_stderr=False)

    if options.extra_res_packages:
      CreateExtraRJavaFiles(
          gen_dir,
          build_utils.ParseGypList(options.extra_res_packages))

    # This is the list of directories with resources to put in the final .zip
    # file. The order of these is important so that crunched/v14 resources
    # override the normal ones.
    zip_resource_dirs = input_resource_dirs + [v14_dir]

    base_crunch_dir = os.path.join(temp_dir, 'crunch')

    # Crunch image resources. This shrinks png files and is necessary for
    # 9-patch images to display correctly. 'aapt crunch' accepts only a single
    # directory at a time and deletes everything in the output directory.
    for idx, d in enumerate(input_resource_dirs):
      crunch_dir = os.path.join(base_crunch_dir, str(idx))
      build_utils.MakeDirectory(crunch_dir)
      zip_resource_dirs.append(crunch_dir)
      aapt_cmd = [aapt,
                  'crunch',
                  '-C', crunch_dir,
                  '-S', d]
      build_utils.CheckOutput(aapt_cmd, stderr_filter=FilterCrunchStderr,
                              fail_func=DidCrunchFail)

    ZipResources(zip_resource_dirs, options.resource_zip_out)

    if options.all_resources_zip_out:
      CombineZips([options.resource_zip_out] + dep_zips,
                  options.all_resources_zip_out)

    # Either copy R.java sources to R_dir or package them into a srcjar.
    if options.R_dir:
      build_utils.DeleteDirectory(options.R_dir)
      shutil.copytree(gen_dir, options.R_dir)
    else:
      build_utils.ZipDir(options.srcjar_out, gen_dir)

  if options.depfile:
    input_files += build_utils.GetPythonDependencies()
    build_utils.WriteDepfile(options.depfile, input_files)

  if options.stamp:
    build_utils.Touch(options.stamp)
コード例 #29
0
def main(args):
    """Runs D8 over the input class files/jars and writes the final dex output.

    Args:
        args: Command-line arguments, parsed by _ParseArgs.

    Side effects:
        Writes options.dex_path (a .dex file, a .jar of dex files, or a dex
        directory), optionally re-layouts it with dexlayout, and writes a
        depfile.
    """
    options, paths = _ParseArgs(args)

    # When ProGuard ran for this configuration, its single output jar replaces
    # the normal input list entirely.
    if ((options.proguard_enabled == 'true'
         and options.configuration_name == 'Release')
            or (options.debug_build_proguard_enabled == 'true'
                and options.configuration_name == 'Debug')):
        paths = [options.proguard_enabled_input_path]

    if options.inputs:
        paths += options.inputs

    if options.excluded_paths:
        # Excluded paths are relative to the output directory.
        exclude_paths = options.excluded_paths
        paths = [
            p for p in paths
            if os.path.relpath(p, options.output_directory) not in exclude_paths
        ]

    # input_paths feeds the depfile; the main-dex list is a real input too.
    input_paths = list(paths)
    if options.multi_dex:
        input_paths.append(options.main_dex_list_path)

    dex_cmd = ['java', '-jar', options.d8_jar_path]
    if options.multi_dex:
        dex_cmd += ['--main-dex-list', options.main_dex_list_path]

    is_dex = options.dex_path.endswith('.dex')
    is_jar = options.dex_path.endswith('.jar')

    if is_jar and _NoClassFiles(paths):
        # Handle case where no classfiles are specified in inputs
        # by creating an empty JAR. The comment attribute must be bytes on
        # Python 3 (assigning str raises TypeError).
        with zipfile.ZipFile(options.dex_path, 'w') as outfile:
            outfile.comment = b'empty'
    elif is_dex:
        # .dex files can't specify a name for D8. Instead, we output them to a
        # temp directory then move them after the command has finished running
        # (see _MoveTempDexFile). For other files, tmp_dex_dir is None.
        with build_utils.TempDir() as tmp_dex_dir:
            _RunD8(dex_cmd, paths, tmp_dex_dir)
            _MoveTempDexFile(tmp_dex_dir, options.dex_path)
    else:
        _RunD8(dex_cmd, paths, options.dex_path)

    if options.dexlayout_profile:
        # Re-order the dex according to a startup profile for better locality.
        with build_utils.TempDir() as temp_dir:
            binary_profile = _CreateBinaryProfile(options.dexlayout_profile,
                                                  options.dex_path,
                                                  options.profman_path,
                                                  temp_dir)
            output_files = _LayoutDex(binary_profile, options.dex_path,
                                      options.dexlayout_path, temp_dir)
            target = None
            if len(output_files) > 1:
                # Multiple dex files: zip them back into a single artifact.
                target = _ZipMultidex(temp_dir, output_files)
            else:
                target = output_files[0]
            shutil.move(os.path.join(temp_dir, target), options.dex_path)

    build_utils.WriteDepfile(options.depfile,
                             options.dex_path,
                             input_paths,
                             add_pydeps=False)
Code example #30
0
File: create_app_bundle.py  Project: wzis/chromium
def main(args):
    """Builds an Android App Bundle from the given module zips via bundletool.

    Args:
        args: Command-line arguments (may contain @rsp file references).

    Side effects:
        Writes the bundle to options.out_bundle and, when requested, the
        concatenated R.txt and the bundle pathmap files.
    """
    args = build_utils.ExpandFileArgs(args)
    options = _ParseArgs(args)

    # Asset-targeting dimensions are matched case-insensitively upstream.
    if options.split_dimensions:
        split_dimensions = [dim.upper() for dim in options.split_dimensions]
    else:
        split_dimensions = []

    with build_utils.TempDir() as work_dir:
        module_zips = []
        for module in options.module_zips:
            module_zips.append(
                _SplitModuleForAssetTargeting(module, work_dir,
                                              split_dimensions))

        base_master_resource_ids = None
        if options.base_module_rtxt_path:
            base_master_resource_ids = _GenerateBaseResourcesAllowList(
                options.base_module_rtxt_path,
                options.base_allowlist_rtxt_path)

        bundle_config = _GenerateBundleConfigJson(
            options.uncompressed_assets, options.compress_shared_libraries,
            split_dimensions, base_master_resource_ids)

        staged_bundle = os.path.join(work_dir, 'tmp_bundle')

        # Important: bundletool requires that the bundle config file is
        # named with a .pb.json extension.
        staged_config = staged_bundle + '.BundleConfig.pb.json'
        with open(staged_config, 'w') as config_file:
            config_file.write(bundle_config)

        cmd_args = build_utils.JavaCmd(options.warnings_as_errors)
        cmd_args += [
            '-jar',
            bundletool.BUNDLETOOL_JAR_PATH,
            'build-bundle',
            '--modules=' + ','.join(module_zips),
            '--output=' + staged_bundle,
            '--config=' + staged_config,
        ]

        build_utils.CheckOutput(
            cmd_args,
            print_stdout=True,
            print_stderr=True,
            stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings,
            fail_on_output=options.warnings_as_errors)

        if options.validate_services:
            # TODO(crbug.com/1126301): This step takes 0.4s locally for bundles with
            # isolated splits disabled and 2s for bundles with isolated splits
            # enabled.  Consider making this run in parallel or move into a separate
            # step before enabling isolated splits by default.
            _MaybeCheckServicesAndProvidersPresentInBase(
                staged_bundle, module_zips)

        shutil.move(staged_bundle, options.out_bundle)

    if options.rtxt_out_path:
        _ConcatTextFiles(options.rtxt_in_paths, options.rtxt_out_path)

    if options.pathmap_out_path:
        _WriteBundlePathmap(options.pathmap_in_paths, options.module_names,
                            options.pathmap_out_path)