def _Generate(java_file_paths,
              srcjar_path,
              proxy_opts,
              header_path=None,
              namespace=''):
    """Generates files required to perform JNI registration.

    Generates a srcjar containing a single class, GEN_JNI, that contains all
    native method declarations.

    Optionally generates a header file that provides functions
    (RegisterMainDexNatives and RegisterNonMainDexNatives) to perform
    JNI registration.

    Args:
      java_file_paths: A list of java file paths.
      srcjar_path: Path to the GEN_JNI srcjar.
      proxy_opts: Options object; proxy_opts.use_hash selects hashed vs.
        unhashed proxy naming and is forwarded to the generators.
      header_path: If specified, generates a header file in this location.
      namespace: If specified, sets the namespace for the generated header file.
    """
    # Without multiprocessing, script takes ~13 seconds for chrome_public_apk
    # on a z620. With multiprocessing, takes ~2 seconds.
    pool = multiprocessing.Pool()

    results = []
    for d in pool.imap_unordered(
            functools.partial(_DictForPath,
                              use_proxy_hash=proxy_opts.use_hash),
            java_file_paths):
        # Skip paths that produced no data.
        if d:
            results.append(d)
    pool.close()

    # Sort to make output deterministic.
    results.sort(key=lambda d: d['FULL_CLASS_NAME'])

    combined_dict = {}
    for key in MERGEABLE_KEYS:
        combined_dict[key] = ''.join(d.get(key, '') for d in results)

    if header_path:
        # Header guard derived from the output path, e.g. foo/bar.h -> FOO_BAR_.
        combined_dict['HEADER_GUARD'] = \
            os.path.splitext(header_path)[0].replace('/', '_').upper() + '_'
        combined_dict['NAMESPACE'] = namespace
        header_content = CreateFromDict(combined_dict, proxy_opts.use_hash)
        with build_utils.AtomicOutput(header_path, mode='w') as f:
            f.write(header_content)

    with build_utils.AtomicOutput(srcjar_path) as f:
        with zipfile.ZipFile(f, 'w') as srcjar:
            build_utils.AddToZipHermetic(
                srcjar,
                '%s.java' % jni_generator.ProxyHelpers.GetQualifiedClass(
                    proxy_opts.use_hash),
                data=CreateProxyJavaFromDict(combined_dict, proxy_opts))
Example #2
0
def main():
    """Writes a srcjar containing a ModuleDescriptor class for one module."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--module', required=True, help='The module name.')
    parser.add_argument('--libraries',
                        required=True,
                        help='GN list of native library paths.')
    parser.add_argument('--output',
                        required=True,
                        help='Path to the generated srcjar file.')
    options = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
    options.libraries = build_utils.ParseGnList(options.libraries)

    libraries = []
    for path in options.libraries:
        name = os.path.basename(path.strip())
        assert name.startswith('lib')
        assert name.endswith('.so')
        # Strip the 'lib' prefix and '.so' suffix to get the library name.
        libraries.append(name[3:-3])

    format_dict = {
        'MODULE': options.module,
        'LIBRARIES': ','.join('"%s"' % name for name in libraries),
    }
    with build_utils.AtomicOutput(options.output) as f:
        with zipfile.ZipFile(f.name, 'w') as srcjar_file:
            build_utils.AddToZipHermetic(
                srcjar_file,
                'org/chromium/components/module_installer/builder/'
                'ModuleDescriptor_%s.java' % options.module,
                data=_TEMPLATE.format(**format_dict))
Example #3
0
def main():
    """Generates a Service Manager catalog manifest JSON file.

    Merges services from included sub-catalogs, embedded-service manifests,
    and standalone-service manifests (with optional executable overrides),
    then atomically writes the combined catalog to --output.

    Returns:
      0 on success. Raises if --output is missing or the final catalog fails
      its sanity check.
    """
    parser = argparse.ArgumentParser(
        description="Generates a Service Manager catalog manifest.")
    parser.add_argument("--output")
    parser.add_argument("--pretty", action="store_true")
    parser.add_argument("--embedded-services",
                        nargs="+",
                        dest="embedded_services",
                        default=[])
    parser.add_argument("--standalone-services",
                        nargs="+",
                        dest="standalone_services",
                        default=[])
    parser.add_argument("--include-catalogs",
                        nargs="+",
                        dest="included_catalogs",
                        default=[])
    parser.add_argument("--override-service-executables",
                        nargs="+",
                        dest="executable_override_specs",
                        default=[])
    args, _ = parser.parse_known_args()

    if args.output is None:
        raise Exception("--output required")

    # Seed the catalog with services from any included sub-catalogs.
    services = {}
    for subcatalog_path in args.included_catalogs:
        subcatalog = ParseJSONFile(subcatalog_path)
        # items() instead of the Python 2-only iteritems() so this runs on
        # both Python 2 and 3.
        for name, entry in subcatalog["services"].items():
            AddServiceEntryToCatalog(services, name, entry)

    executable_overrides = {}
    for override_spec in args.executable_override_specs:
        # Each spec looks like "<service_name>:<path/to/executable>".
        service_name, exe_path = override_spec.split(":", 1)
        executable_overrides[service_name] = exe_path

    for manifest_path in args.embedded_services:
        service_name, manifest = ParseManifest(manifest_path)
        entry = {"embedded": True, "manifest": manifest}
        AddServiceEntryToCatalog(services, service_name, entry)

    for manifest_path in args.standalone_services:
        service_name, manifest = ParseManifest(manifest_path)
        entry = {"embedded": False, "manifest": manifest}
        name = manifest["name"]
        if name in executable_overrides:
            entry["executable"] = executable_overrides[name]
        AddServiceEntryToCatalog(services, service_name, entry)

    catalog = {"services": services}

    with build_utils.AtomicOutput(args.output) as f:
        json.dump(catalog, f, indent=2 if args.pretty else -1)

    # NOTE: We do the sanity check and possible failure *after* outputting the
    # catalog manifest so it's easier to inspect erroneous output.
    SanityCheckCatalog(catalog)

    return 0
Example #4
0
def CheckExpectations(actual_data, options):
    """Compares |actual_data| against an expectations file and reports diffs.

    Always records |actual_data| to options.actual_file. On mismatch, writes a
    patch-formatted message to stderr (and to options.failure_file, if set),
    and exits non-zero when options.fail_on_expectations is set.
    """
    with build_utils.AtomicOutput(options.actual_file) as f:
        f.write(actual_data)
    if options.expected_file_base:
        # Compare only the additions relative to the base expectation file.
        actual_data = _GenerateDiffWithOnlyAdditons(options.expected_file_base,
                                                    actual_data)
    diff_text = _DiffFileContents(options.expected_file, actual_data)

    if not diff_text:
        # Expectations match; nothing to report.
        return

    fail_msg = """
Expectations need updating:
https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/expecations/README.md

LogDog tip: Use "Raw log" or "Switch to lite mode" before copying:
https://bugs.chromium.org/p/chromium/issues/detail?id=984616

To update expectations, run:
########### START ###########
 patch -p1 <<'END_DIFF'
{}
END_DIFF
############ END ############
""".format(diff_text)

    sys.stderr.write(fail_msg)
    if options.failure_file:
        build_utils.MakeDirectory(os.path.dirname(options.failure_file))
        with open(options.failure_file, 'w') as f:
            f.write(fail_msg)
    if options.fail_on_expectations:
        sys.exit(1)
Example #5
0
def _CreateInfoFile(java_files, jar_path, chromium_code, srcjar_files,
                    classes_dir, generated_java_dir, excluded_globs):
    """Writes a .jar.info file.

    This maps fully qualified names for classes to either the java file that
    they are defined in or the path of the srcjar that they came from.

    Args:
      java_files: List of .java source paths to index.
      jar_path: Output jar path; the info file is written to jar_path + '.info'.
      chromium_code: Forwarded to _ProcessInfo (presumably whether sources
        follow Chromium conventions — confirm with that helper).
      srcjar_files: Dict mapping extracted .java paths to their source srcjar.
      classes_dir: Directory compiler-generated .java files are moved out of.
      generated_java_dir: Directory the generated files are moved into.
      excluded_globs: Globs of fully qualified names to omit from the output.
    """
    output_path = jar_path + '.info'
    logging.info('Start creating info file: %s', output_path)
    javac_generated_sources = _MoveGeneratedJavaFilesToGenDir(
        classes_dir, generated_java_dir)
    logging.info('Finished moving generated java files: %s', output_path)
    # 2 processes saves ~0.9s, 3 processes saves ~1.2s, 4 processes saves ~1.2s.
    pool = multiprocessing.Pool(processes=3)
    results = pool.imap_unordered(_ProcessJavaFileForInfo,
                                  itertools.chain(java_files,
                                                  javac_generated_sources),
                                  chunksize=10)
    pool.close()
    all_info_data = {}
    for java_file, package_name, class_names in results:
        # Prefer the originating srcjar (if any) as the reported source.
        source = srcjar_files.get(java_file, java_file)
        for fully_qualified_name in _ProcessInfo(java_file, package_name,
                                                 class_names, source,
                                                 chromium_code):
            if _ShouldIncludeInJarInfo(fully_qualified_name, excluded_globs):
                all_info_data[fully_qualified_name] = java_file
    logging.info('Writing info file: %s', output_path)
    with build_utils.AtomicOutput(output_path) as f:
        jar_info_utils.WriteJarInfoFile(f, all_info_data, srcjar_files)
    logging.info('Completed info file: %s', output_path)
Example #6
0
    def rebuild():
        """Runs bundletool build-apks and writes a hermetic .apks archive."""
        logging.info('Building %s', bundle_apks_path)
        with tempfile.NamedTemporaryFile(suffix='.apks') as tmp_apks_file:
            cmd_args = [
                'build-apks',
                '--aapt2=%s' % aapt2_path,
                '--output=%s' % tmp_apks_file.name,
                '--bundle=%s' % bundle_path,
                '--ks=%s' % keystore_path,
                '--ks-pass=pass:%s' % keystore_password,
                '--ks-key-alias=%s' % keystore_alias,
                '--overwrite',
            ]

            if mode is not None:
                if mode not in BUILD_APKS_MODES:
                    raise Exception(
                        'Invalid mode parameter %s (should be in %s)' %
                        (mode, BUILD_APKS_MODES))
                cmd_args += ['--mode=' + mode]

            # mode='w': json.dump() writes str, which would fail against the
            # binary-mode default of NamedTemporaryFile on Python 3.
            with tempfile.NamedTemporaryFile(mode='w',
                                             suffix='.json') as spec_file:
                if device_spec:
                    json.dump(device_spec, spec_file)
                    spec_file.flush()
                    cmd_args += ['--device-spec=' + spec_file.name]
                bundletool.RunBundleTool(cmd_args)

            # Make the resulting .apks file hermetic.
            with build_utils.TempDir() as temp_dir, \
              build_utils.AtomicOutput(bundle_apks_path, only_if_changed=False) as f:
                files = build_utils.ExtractAll(tmp_apks_file.name, temp_dir)
                build_utils.DoZip(files, f, base_dir=temp_dir)
Example #7
0
def _VerifyManifest(actual_manifest, expected_file, normalized_manifest,
                    expected_manifest_base_expectation,
                    unexpected_manifest_failure_file, fail_on_mismatch):
  """Diffs a normalized AndroidManifest against its expectations file.

  Writes the normalized manifest to |normalized_manifest|, diffs it (or, when
  a base expectation is given, a diff-with-only-additions against that base)
  against |expected_file|, and reports any mismatch to stderr and optionally
  to |unexpected_manifest_failure_file|. Exits the process non-zero when
  |fail_on_mismatch| is set and a mismatch was found.
  """
  with build_utils.AtomicOutput(normalized_manifest) as normalized_output:
    normalized_output.write(manifest_utils.NormalizeManifest(actual_manifest))

  if expected_manifest_base_expectation:
    # Compare only the additions relative to the base expectation.
    with tempfile.NamedTemporaryFile() as generated_diff:
      # NOTE(review): NamedTemporaryFile defaults to binary mode; this write
      # assumes the diff content is bytes-compatible — confirm on Python 3.
      actual_diff_content = diff_utils.GenerateDiffWithOnlyAdditons(
          expected_manifest_base_expectation, normalized_manifest)
      generated_diff.write(actual_diff_content)
      generated_diff.flush()

      msg = diff_utils.DiffFileContents(expected_file, generated_diff.name)
  else:
    msg = diff_utils.DiffFileContents(expected_file, normalized_manifest)

  if not msg:
    # Manifest matches expectations; nothing to report.
    return

  msg_header = """\
AndroidManifest.xml expectations file needs updating. For details see:
https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/java/README.md
"""
  sys.stderr.write(msg_header)
  sys.stderr.write(msg)
  if unexpected_manifest_failure_file:
    build_utils.MakeDirectory(os.path.dirname(unexpected_manifest_failure_file))
    with open(unexpected_manifest_failure_file, 'w') as f:
      f.write(msg_header)
      f.write(msg)
  if fail_on_mismatch:
    sys.exit(1)
 def rebuild():
     """Runs bundletool build-apks, writing directly to the atomic output."""
     logging.info('Building %s', bundle_apks_path)
     with tempfile.NamedTemporaryFile(suffix='.json') as spec_file, \
         build_utils.AtomicOutput(bundle_apks_path, only_if_changed=False) as f:
         cmd_args = [
             'build-apks',
             '--aapt2=%s' % aapt2_path,
             '--output=%s' % f.name,
             '--bundle=%s' % bundle_path,
             '--ks=%s' % keystore_path,
             '--ks-pass=pass:%s' % keystore_password,
             '--ks-key-alias=%s' % keystore_alias,
             '--overwrite',
         ]
         # NOTE(review): spec_file opens in binary mode by default while
         # json.dump writes str — confirm this only runs under Python 2.
         if device_spec:
             json.dump(device_spec, spec_file)
             spec_file.flush()
             cmd_args += ['--device-spec=' + spec_file.name]
         if mode is not None:
             if mode not in BUILD_APKS_MODES:
                 raise Exception(
                     'Invalid mode parameter %s (should be in %s)' %
                     (mode, BUILD_APKS_MODES))
             cmd_args += ['--mode=' + mode]
         bundletool.RunBundleTool(cmd_args)
Example #9
0
def main(args):
    """Converts an emoji metadata ordering file into compact emoticon JSON.

    Args:
      args: Command-line arguments (everything after the program name).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--metadata',
                        required=True,
                        help='emoji metadata ordering file as JSON')
    parser.add_argument('--output',
                        required=True,
                        help='output JSON file path')
    options = parser.parse_args(args)

    metadata_file = options.metadata
    output_file = options.output

    # Parse emoticon ordering data. (Renamed the handle from 'file', which
    # shadowed the Python 2 builtin; the dead 'metadata = []' initializer that
    # json.load immediately overwrote is also gone.)
    with open(metadata_file, 'r') as metadata_fh:
        metadata = json.load(metadata_fh)

    emoticon_data = process_emoticon_data(metadata)

    # Write output file atomically in utf-8 format.
    with build_utils.AtomicOutput(output_file) as tmp_file:
        tmp_file.write(
            json.dumps(emoticon_data,
                       separators=(',', ':'),
                       ensure_ascii=False).encode('utf-8'))
Example #10
0
def _MergeJarInfoFiles(output, inputs):
    """Merge several .jar.info files to generate an .apk.jar.info.

    Args:
      output: output file path.
      inputs: List of .info.jar or .jar files.
    """
    info_data = {}
    for path in inputs:
        # android_java_prebuilt adds jar files in the src directory (relative to
        #     the output directory, usually ../../third_party/example.jar).
        # android_aar_prebuilt collects jar files in the aar file and uses the
        #     java_prebuilt rule to generate gen/example/classes.jar files.
        # We scan these prebuilt jars to parse each class path for the FQN. This
        #     allows us to later map these classes back to their respective src
        #     directories.
        # TODO(agrieve): This should probably also check that the mtime of the .info
        #     is newer than that of the .jar, or change prebuilts to always output
        #     .info files so that they always exist (and change the depfile to
        #     depend directly on them).
        if path.endswith('.info'):
            info_data.update(jar_info_utils.ParseJarInfoFile(path))
            continue
        with zipfile.ZipFile(path) as zip_info:
            for name in zip_info.namelist():
                fqn = _FullJavaNameFromClassFilePath(name)
                if fqn:
                    info_data[fqn] = '{}/{}'.format(path, name)

    # only_if_changed=False since no build rules depend on this as an input.
    with build_utils.AtomicOutput(output, only_if_changed=False) as f:
        jar_info_utils.WriteJarInfoFile(f, info_data)
Example #11
0
def main(argv):
    """Runs the aidl compiler over each input file and zips the results."""
    option_parser = optparse.OptionParser()
    option_parser.add_option('--aidl-path', help='Path to the aidl binary.')
    option_parser.add_option('--imports', help='Files to import.')
    option_parser.add_option('--includes',
                             help='Directories to add as import search paths.')
    option_parser.add_option('--srcjar', help='Path for srcjar output.')
    options, args = option_parser.parse_args(argv[1:])

    with build_utils.TempDir() as temp_dir:
        for f in args:
            # Each positional arg is an .aidl file; the generated .java is
            # redirected into |temp_dir|.
            classname = os.path.splitext(os.path.basename(f))[0]
            output = os.path.join(temp_dir, classname + '.java')
            aidl_cmd = [options.aidl_path]
            aidl_cmd += [
                '-p' + s for s in build_utils.ParseGnList(options.imports)
            ]
            if options.includes is not None:
                aidl_cmd += [
                    '-I' + s for s in build_utils.ParseGnList(options.includes)
                ]
            aidl_cmd += [f, output]
            build_utils.CheckOutput(aidl_cmd)

        with build_utils.AtomicOutput(options.srcjar) as f:
            with zipfile.ZipFile(f, 'w') as srcjar:
                for path in build_utils.FindInDirectory(temp_dir, '*.java'):
                    with open(path) as fileobj:
                        data = fileobj.read()
                    # Derive the zip entry path from the java package statement.
                    pkg_name = re.search(r'^\s*package\s+(.*?)\s*;', data,
                                         re.M).group(1)
                    arcname = '%s/%s' % (pkg_name.replace(
                        '.', '/'), os.path.basename(path))
                    build_utils.AddToZipHermetic(srcjar, arcname, data=data)
Example #12
0
def main():
    """Copies a zip to a new zip, optionally filtering/renaming its entries."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--input', required=True, help='Input zip file.')
    parser.add_argument('--output', required=True, help='Output zip file')
    parser.add_argument('--exclude-globs', help='GN list of exclude globs')
    parser.add_argument('--include-globs', help='GN list of include globs')
    parser.add_argument(
        '--strip-resource-classes-for',
        help='GN list of java package names exclude R.class files in.')

    argv = build_utils.ExpandFileArgs(sys.argv[1:])
    args = parser.parse_args(argv)

    args.exclude_globs = build_utils.ParseGnList(args.exclude_globs)
    args.include_globs = build_utils.ParseGnList(args.include_globs)
    args.strip_resource_classes_for = build_utils.ParseGnList(
        args.strip_resource_classes_for)

    path_transform = CreatePathTransform(args.exclude_globs,
                                         args.include_globs,
                                         args.strip_resource_classes_for)
    with build_utils.AtomicOutput(args.output) as f:
        if path_transform:
            build_utils.MergeZips(f.name, [args.input],
                                  path_transform=path_transform)
        else:
            # No filtering requested: a plain file copy avoids re-zipping.
            shutil.copy(args.input, f.name)
def main(args):
    """Writes an executable wrapper script that invokes another script.

    Args:
      args: Command-line arguments, possibly containing @FileArg references.

    Returns:
      0 on success.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--script-path', help='Path to the wrapped script.')
    parser.add_argument('--script-output-path',
                        help='Path to the output script.')
    group = parser.add_argument_group('Path arguments')
    group.add_argument('--output-directory')
    group.add_argument('--packed-libs')

    args, script_args = parser.parse_known_args(
        build_utils.ExpandFileArgs(args))

    def relativize(p):
        # Paths baked into the wrapper must be relative to its own location.
        return os.path.relpath(p, os.path.dirname(args.script_output_path))

    script_path = relativize(args.script_path)

    script_path_args = []
    if args.output_directory:
        script_path_args.append(
            ('--output-directory', relativize(args.output_directory)))
    if args.packed_libs:
        for p in build_utils.ParseGnList(args.packed_libs):
            script_path_args.append(('--packed-lib', relativize(p)))

    with build_utils.AtomicOutput(args.script_output_path) as script:
        script.write(
            SCRIPT_TEMPLATE.format(script_path=script_path,
                                   script_args=script_args,
                                   script_path_args=script_path_args))

    # rwxr-x---. Note 0o750, not the Python 2-only literal 0750, which is a
    # SyntaxError on Python 3 (0o works on Python 2.6+ as well).
    os.chmod(args.script_output_path, 0o750)

    return 0
def main():
    """Creates a zip of per-locale resource files for the given locale list."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    build_utils.AddDepfileOption(parser)
    parser.add_argument('--locale-list',
                        required=True,
                        help='GN-list of Chrome-specific locale names.')
    parser.add_argument('--output-zip',
                        required=True,
                        help='Output zip archive path.')

    args = parser.parse_args()

    locale_list = build_utils.ParseGnList(args.locale_list)
    if not locale_list:
        raise Exception('Locale list cannot be empty!')

    with build_utils.AtomicOutput(args.output_zip) as tmp_file, \
            zipfile.ZipFile(tmp_file, 'w') as out_zip:
        # First, write the default value, since aapt requires one.
        _AddLocaleResourceFileToZip(out_zip, '', _DEFAULT_CHROME_LOCALE)

        for locale in locale_list:
            _AddLocaleResourceFileToZip(
                out_zip, resource_utils.ToAndroidLocaleName(locale), locale)

    if args.depfile:
        build_utils.WriteDepfile(args.depfile, args.output_zip)
Example #15
0
def main(argv):
  """Pack a list of files into a zip archive.

  Args:
    output: The file path of the zip archive.
    base_dir: Base path of input files.
    languages: Comma-separated list of languages, e.g. en-US,de.
    add: List of files to include in the archive. The language placeholder
         ${lang} is expanded into one file for each language.
  """
  parser = optparse.OptionParser()
  parser.add_option("--output", dest="output")
  parser.add_option("--base_dir", dest="base_dir")
  parser.add_option("--languages", dest="languages")
  parser.add_option("--add", action="append", dest="files", default=[])
  options, args = parser.parse_args(argv[1:])

  # Process file list, possibly expanding language placeholders.
  _LANG_PLACEHOLDER = "${lang}"
  # Materialize the language list eagerly: on Python 3, filter() returns a
  # one-shot iterator that would be exhausted after the first file containing
  # the placeholder, silently dropping expansions for every later file.
  languages = [lang for lang in options.languages.split(',') if lang]
  file_list = []
  for file_to_add in options.files:
    if _LANG_PLACEHOLDER in file_to_add:
      for lang in languages:
        file_list.append(file_to_add.replace(_LANG_PLACEHOLDER, lang))
    else:
      file_list.append(file_to_add)

  with build_utils.AtomicOutput(options.output) as f:
    build_utils.DoZip(file_list, f, options.base_dir)
Example #16
0
 def WriteRTxtFile(self, rtxt_path):
     """Writes one R.txt line per collected resource to |rtxt_path|."""
     resources = self._CollectResourcesListFromDirectories()
     with build_utils.AtomicOutput(rtxt_path) as f:
         for resource in resources:
             # R.txt line format: '<java_type> <resource_type> <name> <value>'.
             line = '{0.java_type} {0.resource_type} {0.name} {0.value}\n'.format(
                 resource)
             f.write(line)
Example #17
0
def _CreateInfoFile(java_files, options, srcjar_files):
  """Writes a .jar.info file.

  This maps fully qualified names for classes to either the java file that they
  are defined in or the path of the srcjar that they came from.

  For apks this also produces a coalesced .apk.jar.info file combining all the
  .jar.info files of its transitive dependencies.
  """
  info_data = dict()
  for java_file in java_files:
    package_name, class_names = _ParsePackageAndClassNames(java_file)
    for class_name in class_names:
      fully_qualified_name = '{}.{}'.format(package_name, class_name)
      info_data[fully_qualified_name] = java_file
    # Skip aidl srcjars since they don't indent code correctly.
    source = srcjar_files.get(java_file, java_file)
    if '_aidl.srcjar' in source:
      continue
    assert not options.chromium_code or len(class_names) == 1, (
        'Chromium java files must only have one class: {}'.format(source))
    if options.chromium_code:
      _CheckPathMatchesClassName(java_file, package_name, class_names[0])
  with build_utils.AtomicOutput(options.jar_path + '.info') as f:
    # NOTE(review): f.name (the temp file path) is passed rather than the file
    # object — confirm WriteJarInfoFile expects a path in this codebase.
    jar_info_utils.WriteJarInfoFile(f.name, info_data, srcjar_files)
def _MergeResInfoFiles(res_info_path, info_paths):
    """Concatenates all .res.info files into one merged file."""
    # only_if_changed=False since no build rules depend on this as an input.
    with build_utils.AtomicOutput(res_info_path, only_if_changed=False) as out:
        for info_path in info_paths:
            with open(info_path) as info_file:
                for line in info_file:
                    out.write(_TransformAarPaths(line))
def main(args):
    """Generates a skeleton Java file exposing a class's public methods."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--input', required=True, help='Input java file path.')
    parser.add_argument('--output',
                        required=True,
                        help='Output java file path.')
    options = parser.parse_args(args)

    with open(options.input, 'r') as f:
        content = f.read()

    java_ast = javalang.parse.parse(content)
    assert len(
        java_ast.types) == 1, 'Can only process Java files with one class'
    clazz = java_ast.types[0]
    import_dict = _ParseImports(java_ast.imports)
    signature_types = _ParsePublicMethodsSignatureTypes(clazz)
    formatted_public_methods = _FormatPublicMethods(clazz)
    # Presumably keeps only imports referenced by public-method signatures —
    # see _FilterAndFormatImports for the exact contract.
    formatted_imports = _FilterAndFormatImports(import_dict, signature_types)

    file_dict = {
        'YEAR': str(datetime.date.today().year),
        'SCRIPT_NAME': _GetScriptName(),
        'PACKAGE': java_ast.package.name,
        'IMPORTS': '\n'.join(formatted_imports),
        'MODIFIERS': ' '.join(clazz.modifiers),
        'CLASS_NAME': clazz.name,
        'METHODS': '\n'.join(['    ' + m for m in formatted_public_methods])
    }
    with build_utils.AtomicOutput(options.output) as f:
        f.write(_FILE_TEMPLATE.format(**file_dict))
Example #20
0
def _WriteXmlFile(root, path):
  """Pretty-prints ElementTree |root| and atomically writes it to |path|."""
  build_utils.MakeDirectory(os.path.dirname(path))
  with build_utils.AtomicOutput(path) as f:
    # Although we can write it just with ElementTree.tostring, using minidom
    # makes it a lot easier to read as a human (also on code search).
    raw_xml = ElementTree.tostring(root, encoding='utf-8')
    pretty_xml = minidom.parseString(raw_xml).toprettyxml(indent='  ')
    f.write(pretty_xml)
def _Generate(java_file_paths, srcjar_path, header_path=None, namespace=''):
    """Generates files required to perform JNI registration.

    Generates a srcjar containing a single class, GEN_JNI, that contains all
    native method declarations.

    Optionally generates a header file that provides functions
    (RegisterMainDexNatives and RegisterNonMainDexNatives) to perform
    JNI registration.

    Args:
      java_file_paths: A list of java file paths.
      srcjar_path: Path to the GEN_JNI srcjar.
      header_path: If specified, generates a header file in this location.
      namespace: If specified, sets the namespace for the generated header file.
    """
    # Without multiprocessing, script takes ~13 seconds for chrome_public_apk
    # on a z620. With multiprocessing, takes ~2 seconds.
    pool = multiprocessing.Pool()
    # Keep only paths that produced JNI data.
    results = [
        d for d in pool.imap_unordered(_DictForPath, java_file_paths) if d
    ]
    pool.close()

    # Sort to make output deterministic.
    results.sort(key=lambda d: d['FULL_CLASS_NAME'])

    if header_path:
        combined_dict = {}
        for key in MERGEABLE_KEYS:
            combined_dict[key] = ''.join(d.get(key, '') for d in results)

        # Header guard derived from the output path, e.g. foo/bar.h -> FOO_BAR_.
        combined_dict['HEADER_GUARD'] = \
            os.path.splitext(header_path)[0].replace('/', '_').upper() + '_'
        combined_dict['NAMESPACE'] = namespace

        header_content = CreateFromDict(combined_dict)
        with build_utils.AtomicOutput(header_path) as f:
            f.write(header_content)

    with build_utils.AtomicOutput(srcjar_path) as f:
        with zipfile.ZipFile(f, 'w') as srcjar:
            # TODO(abenner): Write GEN_JNI.java here.
            # build_utils.AddToZipHermetic(srcjar, 'org/chromium/base/GEN_JNI.java',
            #     data='$CONTENT')
            pass
def _MergePakInfoFiles(merged_path, pak_infos):
    """Merges .pak.info files into one sorted, deduplicated file."""
    info_lines = set()
    for pak_info_path in pak_infos:
        with open(pak_info_path, 'r') as src_info_file:
            for raw_line in src_info_file:
                info_lines.add(_TransformAarPaths(raw_line))
    # only_if_changed=False since no build rules depend on this as an input.
    with build_utils.AtomicOutput(merged_path, only_if_changed=False) as f:
        f.writelines(sorted(info_lines))
Example #23
0
def ApplyTemplate(path_to_template, output_path, global_vars, **kwargs):
    """Renders a Jinja2 template and atomically writes the result.

    Extra keyword args are forwarded to the jinja2.Environment constructor.
    """
    with build_utils.AtomicOutput(output_path) as output_file:
        loader = jinja2.FileSystemLoader(os.path.dirname(__file__))
        jinja_env = jinja2.Environment(loader=loader,
                                       keep_trailing_newline=True,
                                       **kwargs)
        jinja_env.globals.update(global_vars)
        rendered = jinja_env.get_template(path_to_template).render()
        output_file.write(rendered)
def main(args):
    """Rewrites the factory class method bodies from input to output."""
    options = _ParseArgs(args)
    with open(options.input, 'r') as input_file:
        content = input_file.read()
    _EnsureSimpleFactory(content)
    replaced = _ReplaceMethodBodies(content)
    with build_utils.AtomicOutput(options.output) as output_file:
        output_file.write(replaced)
Example #25
0
def main():
    """Runs ijar on a jar, writing the interface jar atomically.

    The point of this wrapper is to use AtomicOutput so that output timestamps
    are not updated when outputs are unchanged.
    """
    ijar_bin, in_jar, out_jar = sys.argv[1:]
    with build_utils.AtomicOutput(out_jar) as f:

        def stderr_filter(output):
            # ijar logs one line per passed-through class; drop the noise.
            return build_utils.FilterLines(output, r'Passing class through')

        build_utils.CheckOutput([ijar_bin, in_jar, f.name],
                                stderr_filter=stderr_filter)
Example #26
0
def main(args):
    """Splits an input dex zip into per-feature-module dex outputs.

    Runs the dexsplitter tool (when more than one feature is present), then
    moves/zips each resulting dex into its destination and writes the stamp
    and depfile.
    """
    args = build_utils.ExpandFileArgs(args)
    options = _ParseOptions(args)

    # Every feature jar is a depfile input. values() instead of the
    # Python 2-only itervalues() so this runs on both Python 2 and 3.
    input_paths = []
    for feature_jars in options.features.values():
        for feature_jar in feature_jars:
            input_paths.append(feature_jar)

    with build_utils.TempDir() as dexsplitter_output_dir:
        curr_location_to_dest = []
        if len(options.features) == 1:
            # Don't run dexsplitter since it needs at least 1 feature module.
            curr_location_to_dest.append(
                (options.input_dex_zip, options.dex_dests[0]))
        else:
            _RunDexsplitter(options, dexsplitter_output_dir)

            for i, dest in enumerate(options.dex_dests):
                module_dex_file = os.path.join(dexsplitter_output_dir,
                                               options.feature_names[i],
                                               'classes.dex')
                if os.path.exists(module_dex_file):
                    curr_location_to_dest.append((module_dex_file, dest))
                else:
                    # Dexsplitter may emit a .zip instead of a bare .dex.
                    module_dex_file += '.zip'
                    assert os.path.exists(
                        module_dex_file), 'Dexsplitter tool output not found.'
                    # Bug fix: '.zip' was previously appended a second time
                    # here, producing a 'classes.dex.zip.zip' path that the
                    # assert above had just shown does not exist.
                    curr_location_to_dest.append((module_dex_file, dest))

        for curr_location, dest in curr_location_to_dest:
            with build_utils.AtomicOutput(dest) as f:
                if curr_location.endswith('.zip'):
                    if dest.endswith('.zip'):
                        shutil.move(curr_location, f.name)
                    else:
                        # Unpack the single dex entry out of the zip.
                        with zipfile.ZipFile(curr_location, 'r') as z:
                            namelist = z.namelist()
                            assert len(namelist) == 1, (
                                'Unzipping to single dex file, but not single dex file in '
                                + options.input_dex_zip)
                            z.extract(namelist[0], f.name)
                else:
                    if dest.endswith('.zip'):
                        build_utils.ZipDir(
                            f.name,
                            os.path.abspath(
                                os.path.join(curr_location, os.pardir)))
                    else:
                        shutil.move(curr_location, f.name)

    build_utils.Touch(options.stamp)
    build_utils.WriteDepfile(options.depfile,
                             options.stamp,
                             inputs=input_paths)
Example #27
0
def main(args):
    """Builds a zip archive from loose files and/or by merging input zips.

    Args:
      args: Command-line argument list (may contain @file response-file
          references, which are expanded by build_utils.ExpandFileArgs).

    Side effects:
      Writes the archive to --output (atomically, so the timestamp is only
      updated when the content changes) and optionally writes a depfile.
    """
    args = build_utils.ExpandFileArgs(args)
    # BUG FIX: the first positional parameter of ArgumentParser() is `prog`
    # (the program name shown in help text), not the argument list. Passing
    # `args` here produced garbage usage strings; the arguments themselves
    # are supplied to parse_args() below.
    parser = argparse.ArgumentParser()
    parser.add_argument('--input-files', help='GN-list of files to zip.')
    parser.add_argument(
        '--input-files-base-dir',
        help='Paths in the archive will be relative to this directory')
    parser.add_argument('--input-zips', help='GN-list of zips to merge.')
    parser.add_argument('--input-zips-excluded-globs',
                        help='GN-list of globs for paths to exclude.')
    parser.add_argument('--output',
                        required=True,
                        help='Path to output archive.')
    compress_group = parser.add_mutually_exclusive_group()
    compress_group.add_argument('--compress',
                                action='store_true',
                                help='Compress entries')
    compress_group.add_argument('--no-compress',
                                action='store_false',
                                dest='compress',
                                help='Do not compress entries')
    build_utils.AddDepfileOption(parser)
    options = parser.parse_args(args)

    with build_utils.AtomicOutput(options.output) as f:
        with zipfile.ZipFile(f.name, 'w') as out_zip:
            # Only --input-zips contribute depfile entries; loose input files
            # are listed directly in the GN build graph.
            depfile_deps = None
            if options.input_files:
                files = build_utils.ParseGnList(options.input_files)
                build_utils.DoZip(files,
                                  out_zip,
                                  base_dir=options.input_files_base_dir,
                                  compress_fn=lambda _: options.compress)

            if options.input_zips:
                files = build_utils.ParseGnList(options.input_zips)
                depfile_deps = files
                path_transform = None
                if options.input_zips_excluded_globs:
                    globs = build_utils.ParseGnList(
                        options.input_zips_excluded_globs)
                    # Returning None from the transform drops the entry.
                    path_transform = (
                        lambda p: None
                        if build_utils.MatchesGlob(p, globs) else p)
                build_utils.MergeZips(out_zip,
                                      files,
                                      path_transform=path_transform,
                                      compress=options.compress)

    # Depfile used only by dist_jar().
    if options.depfile:
        build_utils.WriteDepfile(options.depfile,
                                 options.output,
                                 inputs=depfile_deps,
                                 add_pydeps=False)
Example #28
0
def main():
  # This wrapper exists so the ijar invocation goes through AtomicOutput:
  # the output file's timestamp is only touched when its content changes.
  ijar_bin, input_jar, output_jar = sys.argv[1:]
  with build_utils.AtomicOutput(output_jar) as out_file:
    # ijar rejects empty jars (https://github.com/bazelbuild/bazel/issues/10162),
    # so those are copied through verbatim instead.
    if os.path.getsize(input_jar) > _EMPTY_JAR_SIZE:
      build_utils.CheckOutput([ijar_bin, input_jar, out_file.name])
    else:
      with open(input_jar, 'rb') as src:
        out_file.write(src.read())
Example #29
0
def main():
    """Generates a srcjar holding the ModuleDescriptor class for a module."""
    parser = argparse.ArgumentParser()
    build_utils.AddDepfileOption(parser)
    parser.add_argument('--module', required=True, help='The module name.')
    parser.add_argument('--libraries-file',
                        required=True,
                        help='Path to file with GN list of library paths')
    parser.add_argument('--paks', help='GN list of PAK file paths')
    parser.add_argument('--output',
                        required=True,
                        help='Path to the generated srcjar file.')
    parser.add_argument(
        '--load-native-on-get-impl',
        action='store_true',
        default=False,
        help='Load module automatically on calling Module.getImpl().')
    options = parser.parse_args()
    options.paks = build_utils.ParseGnList(options.paks)

    with open(options.libraries_file) as f:
        library_paths = build_utils.ParseGnList(f.read())

    def _library_name(path):
        # Library file names look like "libfoo.so"; strip the "lib" prefix
        # and the ".so" suffix to recover the bare name.
        base = os.path.split(path.strip())[1]
        assert base.startswith('lib')
        assert base.endswith('.so')
        return base[3:-3]

    libraries = [_library_name(p) for p in library_paths]
    paks = options.paks if options.paks else []

    format_dict = {
        'MODULE':
        options.module,
        'LIBRARIES':
        ','.join('"%s"' % name for name in libraries),
        'PAKS':
        ','.join('"%s"' % os.path.basename(p) for p in paks),
        'LOAD_NATIVE_ON_GET_IMPL':
        ('true' if options.load_native_on_get_impl else 'false'),
    }
    with build_utils.AtomicOutput(options.output) as f:
        with zipfile.ZipFile(f.name, 'w') as srcjar_file:
            build_utils.AddToZipHermetic(
                srcjar_file,
                'org/chromium/components/module_installer/builder/'
                'ModuleDescriptor_%s.java' % options.module,
                data=_TEMPLATE.format(**format_dict))

    if options.depfile:
        build_utils.WriteDepfile(options.depfile,
                                 options.output,
                                 inputs=[options.libraries_file])
Example #30
0
def _CreateResourceInfoFile(renamed_paths, apk_info_path,
                            dependencies_res_zips):
    """Writes a merged .info file for an APK's resources.

    Collects the lines of every existing "<res_zip>.info" sidecar file,
    appends one "Rename:<dest>,<source>" line per entry in renamed_paths,
    and writes the de-duplicated, sorted result atomically to apk_info_path.

    Args:
      renamed_paths: Mapping of dest path -> source path for renamed files.
      apk_info_path: Output path for the combined info file.
      dependencies_res_zips: Paths to dependency resource zips whose ".info"
          sidecars (if present) are merged in.
    """
    lines = set()
    for zip_file in dependencies_res_zips:
        zip_info_file_path = zip_file + '.info'
        # Sidecar .info files are optional; silently skip missing ones.
        if os.path.exists(zip_info_file_path):
            with open(zip_info_file_path, 'r') as zip_info_file:
                lines.update(zip_info_file.readlines())
    # BUG FIX: dict.iteritems() was removed in Python 3; items() is correct
    # on both and this script otherwise targets Python 3.
    for dest, source in renamed_paths.items():
        lines.add('Rename:{},{}\n'.format(dest, source))
    # Sort so the output is deterministic regardless of set iteration order.
    with build_utils.AtomicOutput(apk_info_path) as info_file:
        info_file.writelines(sorted(lines))