Example #1
def main():
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--src-dir', action="append",
      help='Directory containing .java files.')
  parser.add_option('--jar-path', help='Jar output path.')
  parser.add_option('--stamp', help='Path to touch on success.')

  options, _ = parser.parse_args()

  src_dirs = []
  for src_dir in options.src_dir:
    src_dirs.extend(build_utils.ParseGypList(src_dir))

  for src_dir in src_dirs:
    JarSources(src_dir, options.jar_path)

  if options.depfile:
    input_paths = []
    for src_dir in src_dirs:
      for root, _, filenames in os.walk(src_dir):
        input_paths.extend(os.path.join(root, f) for f in filenames)
    build_utils.WriteDepfile(options.depfile,
                             input_paths + build_utils.GetPythonDependencies())

  if options.stamp:
    build_utils.Touch(options.stamp)
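
Most of the examples in this listing follow the same build-action skeleton: parse the command line, do the real work, then optionally write a depfile for ninja and touch a stamp file on success. The sketch below shows that skeleton on its own, with hypothetical standard-library stand-ins (write_depfile, touch) for the build_utils helpers; the Makefile-style "target: deps" depfile line is an assumption about roughly what build_utils.WriteDepfile emits, not its actual implementation.

import optparse
import os


def write_depfile(depfile_path, target, deps):
  # Hypothetical stand-in for build_utils.WriteDepfile: ninja reads a
  # Makefile-style "target: dep dep ..." line (assumed format).
  with open(depfile_path, 'w') as f:
    f.write('%s: %s\n' % (target, ' '.join(deps)))


def touch(path):
  # Hypothetical stand-in for build_utils.Touch.
  with open(path, 'a'):
    pass
  os.utime(path, None)


def main():
  parser = optparse.OptionParser()
  parser.add_option('--depfile', help='Path of the depfile to write.')
  parser.add_option('--stamp', help='Path to touch on success.')
  options, _ = parser.parse_args()

  # ... the real work goes here; collect every input file that was read ...
  input_paths = []

  if options.depfile:
    write_depfile(options.depfile, options.stamp or 'output', input_paths)
  if options.stamp:
    touch(options.stamp)


if __name__ == '__main__':
  main()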
Example #2
def main():
    parser = optparse.OptionParser()
    build_utils.AddDepfileOption(parser)
    parser.add_option('--output-dir', help='Directory to put javadoc')
    parser.add_option('--input-dir', help='Root of cronet source')
    parser.add_option('--input-src-jar', help='Cronet api source jar')
    parser.add_option('--overview-file', help='Path of the overview page')
    parser.add_option('--readme-file', help='Path of the README.md')
    parser.add_option('--lib-java-dir', help='Directory containing java libs')

    options, _ = parser.parse_args()
    # A temporary directory for the unpacked Cronet API source jar.
    unzipped_jar_path = tempfile.mkdtemp(dir=options.output_dir)
    if os.path.exists(options.input_src_jar):
        jar_cmd = ['jar', 'xf', os.path.abspath(options.input_src_jar)]
        build_utils.CheckOutput(jar_cmd, cwd=unzipped_jar_path)
    else:
        raise Exception('Jar file does not exist: %s' % options.input_src_jar)

    net_docs.ProcessDocs([options.readme_file],
                         options.input_dir,
                         options.output_dir,
                         extensions=[CronetExtension()])

    GenerateJavadoc(options, os.path.abspath(unzipped_jar_path))

    if options.depfile:
        input_paths = []
        for root, _, filenames in os.walk(options.input_dir):
            input_paths.extend(os.path.join(root, f) for f in filenames)
        build_utils.WriteDepfile(
            options.depfile, input_paths + build_utils.GetPythonDependencies())
    # Clean up temporary output directory.
    build_utils.DeleteDirectory(unzipped_jar_path)
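
The 'jar xf' subprocess above just unpacks the API source jar into the temporary directory before javadoc generation. Since a .jar file is an ordinary zip archive, a hypothetical standard-library equivalent of that extraction step could look like the following sketch (an illustration, not what the script actually runs):

import os
import tempfile
import zipfile


def extract_source_jar(src_jar_path, output_dir):
    # A .jar is a zip archive, so zipfile can unpack it directly into a
    # temporary directory created under output_dir.
    if not os.path.exists(src_jar_path):
        raise Exception('Jar file does not exist: %s' % src_jar_path)
    unzipped_path = tempfile.mkdtemp(dir=output_dir)
    with zipfile.ZipFile(src_jar_path) as z:
        z.extractall(unzipped_path)
    return unzipped_path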
Example #3
def main():
    class FormatterWithNewLines(optparse.IndentedHelpFormatter):
        def format_description(self, description):
            paras = description.split('\n')
            formatted_paras = [
                textwrap.fill(para, self.width) for para in paras
            ]
            return '\n'.join(formatted_paras) + '\n'

    parser = optparse.OptionParser(formatter=FormatterWithNewLines(),
                                   usage='%prog [options]')
    parser.add_option('--json', help='Path to JSON output file')
    build_utils.AddDepfileOption(parser)
    parser.description = (__doc__ + '\nCommands:\n'
                          '  scan Check licenses.\n'
                          '  notice [file] Generate Android NOTICE file on '
                          'stdout or into |file|.\n'
                          '  display_copyrights Display authorship on the files'
                          ' using names provided via stdin.\n')
    options, args = parser.parse_args()
    if len(args) < 1:
        parser.print_help()
        return ScanResult.Errors

    if args[0] == 'scan':
        scan_result, problem_paths = _Scan()
        if scan_result == ScanResult.Ok:
            print 'OK!'
        if options.json:
            with open(options.json, 'w') as f:
                json.dump(problem_paths, f)
        return scan_result
    elif args[0] == 'notice':
        license_file_list, notice_file_contents = GenerateNoticeFile()
        if len(args) == 1:
            print notice_file_contents
        else:
            with open(args[1], 'w') as output_file:
                output_file.write(notice_file_contents)
        if options.depfile:
            assert args[1]
            # Add in build.ninja so that the target will be considered dirty
            # whenever gn gen is run. Otherwise, it will fail to notice new
            # files being added. This is still not perfect, as it will fail if
            # no build files are changed but a new README.chromium / LICENSE is
            # added. This shouldn't happen in practice, however.
            build_utils.WriteDepfile(options.depfile, args[1],
                                     license_file_list + ['build.ninja'])

        return ScanResult.Ok
    elif args[0] == 'display_copyrights':
        files = sys.stdin.read().splitlines()
        copyrights = copyright_scanner.FindCopyrights(InputApi(), '.', files)
        for f, c in zip(files, copyrights):
            print f, '\t', ' / '.join(sorted(c))
        return ScanResult.Ok
    parser.print_help()
    return ScanResult.Errors
Example #4
def main():
    parser = optparse.OptionParser()
    build_utils.AddDepfileOption(parser)
    parser.add_option('--src-dir',
                      action="append",
                      help='Directory containing .java files.')
    parser.add_option(
        '--src-jars',
        action="append",
        help='A list of source jars to include in addition to source files.')
    parser.add_option('--jar-path', help='Jar output path.')
    parser.add_option('--stamp', help='Path to touch on success.')

    options, _ = parser.parse_args()

    # A temporary directory for the unpacked source jars.
    unzipped_jar_path = None
    if options.src_jars:
        unzipped_jar_path = tempfile.mkdtemp(
            dir=os.path.dirname(options.jar_path))
        jar_list = []
        for gn_list in options.src_jars:
            jar_list.extend(build_utils.ParseGnList(gn_list))

        for jar in jar_list:
            UnzipSourceJar(jar, unzipped_jar_path)

    src_dirs = []
    for src_dir in options.src_dir:
        src_dirs.extend(build_utils.ParseGnList(src_dir))
    if unzipped_jar_path:
        src_dirs += [unzipped_jar_path]

    for src_dir in src_dirs:
        JarSources(src_dir, options.jar_path)

    if options.depfile:
        input_paths = []
        for src_dir in src_dirs:
            for root, _, filenames in os.walk(src_dir):
                input_paths.extend(os.path.join(root, f) for f in filenames)
        build_utils.WriteDepfile(
            options.depfile, input_paths + build_utils.GetPythonDependencies())
    # Clean up temporary output directory.
    if unzipped_jar_path:
        build_utils.DeleteDirectory(unzipped_jar_path)

    if options.stamp:
        build_utils.Touch(options.stamp)
Example #5
def main():
    parser = optparse.OptionParser()
    build_utils.AddDepfileOption(parser)
    parser.add_option('--project-dir', help='Gradle project directory.')
    parser.add_option('--stamp', help='Path to touch on success.')

    options, _ = parser.parse_args()

    BuildWithGradle(options)

    if options.depfile:
        assert options.stamp
        build_utils.WriteDepfile(options.depfile, options.stamp)

    if options.stamp:
        build_utils.Touch(options.stamp)
Example #6
def main():
    parser = optparse.OptionParser()
    build_utils.AddDepfileOption(parser)
    parser.add_option('--classes-dir',
                      help='Directory to extract .class files.')
    parser.add_option('--jars', help='Paths to jars to extract.')
    parser.add_option('--stamp', help='Path to touch on success.')

    options, _ = parser.parse_args()

    ExtractJars(options)

    if options.depfile:
        assert options.stamp
        build_utils.WriteDepfile(options.depfile, options.stamp)

    if options.stamp:
        build_utils.Touch(options.stamp)
Example #7
def main():
    parser = optparse.OptionParser()
    build_utils.AddDepfileOption(parser)
    parser.add_option('--output-dir', help='Directory to put javadoc')
    parser.add_option('--input-dir', help='Root of cronet source')
    parser.add_option('--input-src-jar', help='Cronet api source jar')
    parser.add_option('--overview-file', help='Path of the overview page')
    parser.add_option('--readme-file', help='Path of the README.md')
    parser.add_option('--zip-file', help='Path to ZIP archive of javadocs.')
    parser.add_option('--android-sdk-jar', help='Path to android.jar')

    options, _ = parser.parse_args()
    # A temporary directory for the unpacked Cronet API source jar.
    unzipped_jar_path = tempfile.mkdtemp(dir=options.output_dir)
    if os.path.exists(options.input_src_jar):
        jar_cmd = ['jar', 'xf', os.path.abspath(options.input_src_jar)]
        build_utils.CheckOutput(jar_cmd, cwd=unzipped_jar_path)
    else:
        raise Exception('Jar file does not exist: %s' % options.input_src_jar)

    net_docs.ProcessDocs([options.readme_file],
                         options.input_dir,
                         options.output_dir,
                         extensions=[CronetExtension()])

    output_dir = os.path.abspath(os.path.join(options.output_dir, 'javadoc'))
    GenerateJavadoc(options, os.path.abspath(unzipped_jar_path), output_dir)

    if options.zip_file:
        assert options.zip_file.endswith('.zip')
        # shutil.make_archive appends the '.zip' extension itself, so strip it
        # from the base name before archiving the javadoc output directory.
        shutil.make_archive(options.zip_file[:-4], 'zip', output_dir)
    if options.depfile:
        assert options.zip_file
        deps = []
        for root, _, filenames in os.walk(options.input_dir):
            # Ignore .pyc files here; they might be regenerated during the build.
            deps.extend(
                os.path.join(root, f) for f in filenames
                if not f.endswith('.pyc'))
        build_utils.WriteDepfile(options.depfile, options.zip_file, deps)
    # Clean up temporary output directory.
    build_utils.DeleteDirectory(unzipped_jar_path)
Example #8
def main():
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--output-dir', help='Directory to put javadoc')
  parser.add_option('--input-dir', help='Root of cronet source')
  parser.add_option('--overview-file', help='Path of the overview page')
  parser.add_option('--readme-file', help='Path of the README.md')

  options, _ = parser.parse_args()

  net_docs.ProcessDocs([options.readme_file], options.input_dir,
                       options.output_dir, extensions=[CronetExtension()])

  GenerateJavadoc(options)

  if options.depfile:
    input_paths = []
    for root, _, filenames in os.walk(options.input_dir):
      input_paths.extend(os.path.join(root, f) for f in filenames)
    build_utils.WriteDepfile(options.depfile,
                             input_paths + build_utils.GetPythonDependencies())
Example #9
def GenerateCredits(file_template_file,
                    entry_template_file,
                    output_file,
                    target_os,
                    gn_out_dir,
                    gn_target,
                    depfile=None):
    """Generate about:credits."""
    def EvaluateTemplate(template, env, escape=True):
        """Expand a template with variables like {{foo}} using a
        dictionary of expansions."""
        for key, val in env.items():
            if escape:
                val = cgi.escape(val)
            template = template.replace('{{%s}}' % key, val)
        return template

    if gn_target:
        third_party_dirs = FindThirdPartyDeps(gn_out_dir, gn_target)

        # Sanity-check to raise a build error if invalid gn_... settings are
        # somehow passed to this script.
        if not third_party_dirs:
            raise RuntimeError("No deps found.")
    else:
        third_party_dirs = FindThirdPartyDirs(PRUNE_PATHS, _REPOSITORY_ROOT)

    if not file_template_file:
        file_template_file = os.path.join(_REPOSITORY_ROOT, 'components',
                                          'about_ui', 'resources',
                                          'about_credits.tmpl')
    if not entry_template_file:
        entry_template_file = os.path.join(_REPOSITORY_ROOT, 'components',
                                           'about_ui', 'resources',
                                           'about_credits_entry.tmpl')

    entry_template = open(entry_template_file).read()
    entries = []
    for path in third_party_dirs:
        try:
            metadata = ParseDir(path, _REPOSITORY_ROOT)
        except LicenseError:
            # TODO(phajdan.jr): Convert to fatal error (http://crbug.com/39240).
            continue
        if metadata['License File'] == NOT_SHIPPED:
            continue
        if target_os == 'ios' and not gn_target:
            # Skip over files that are known not to be used on iOS. But
            # skipping is unnecessary if GN was used to query the actual
            # dependencies.
            # TODO(lambroslambrou): Remove this step once the iOS build is
            # updated to provide --gn-target to this script.
            if path in KNOWN_NON_IOS_LIBRARIES:
                continue
        env = {
            'name': metadata['Name'],
            'url': metadata['URL'],
            'license': open(metadata['License File'], 'rb').read(),
        }
        entry = {
            'name': metadata['Name'],
            'content': EvaluateTemplate(entry_template, env),
            'license_file': metadata['License File'],
        }
        entries.append(entry)

    entries.sort(key=lambda entry: (entry['name'], entry['content']))
    entries_contents = '\n'.join([entry['content'] for entry in entries])
    file_template = open(file_template_file).read()
    template_contents = "<!-- Generated by licenses.py; do not edit. -->"
    template_contents += EvaluateTemplate(file_template,
                                          {'entries': entries_contents},
                                          escape=False)

    if output_file:
        with open(output_file, 'w') as output:
            output.write(template_contents)
    else:
        print template_contents

    if depfile:
        assert output_file
        # Add in build.ninja so that the target will be considered dirty
        # whenever gn gen is run. Otherwise, it will fail to notice new files
        # being added. This is still not perfect, as it will fail if no build
        # files are changed but a new README.chromium / LICENSE is added. This
        # shouldn't happen in practice, however.
        license_file_list = (entry['license_file'] for entry in entries)
        license_file_list = (os.path.relpath(p) for p in license_file_list)
        license_file_list = sorted(set(license_file_list))
        build_utils.WriteDepfile(depfile, output_file,
                                 license_file_list + ['build.ninja'])

    return True
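
As a quick illustration of the nested EvaluateTemplate helper defined at the top of this example, the snippet below expands a toy template with made-up values; the helper is copied out to module scope here purely for the demonstration.

import cgi


def EvaluateTemplate(template, env, escape=True):
    """Expand {{key}} placeholders (copy of the nested helper above)."""
    for key, val in env.items():
        if escape:
            val = cgi.escape(val)
        template = template.replace('{{%s}}' % key, val)
    return template


print EvaluateTemplate('<a href="{{url}}">{{name}}</a>',
                       {'name': 'foo & bar', 'url': 'https://example.com'})
# Prints: <a href="https://example.com">foo &amp; bar</a>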
Example #10
def main():
    parser = argparse.ArgumentParser()
    build_utils.AddDepfileOption(parser)
    parser.add_argument(
        '--excluded-classes',
        help='A list of .class file patterns to exclude from the jar.')
    parser.add_argument('--src-search-dirs',
                        action='append',
                        help='A list of directories that should be searched'
                        ' for the source files.')
    parser.add_argument('--src-files',
                        action='append',
                        help='A list of source files to jar.')
    parser.add_argument(
        '--src-jars',
        action='append',
        help='A list of source jars to include in addition to source files.')
    parser.add_argument('--src-list-files',
                        action='append',
                        help='A list of files that contain a list of sources,'
                        ' e.g. a list of \'.sources\' files generated by GN.')
    parser.add_argument('--jar-path', help='Jar output path.', required=True)

    options = parser.parse_args()

    src_jars = []
    for gn_list in options.src_jars:
        src_jars.extend(build_utils.ParseGnList(gn_list))

    src_search_dirs = []
    for gn_src_search_dirs in options.src_search_dirs:
        src_search_dirs.extend(build_utils.ParseGnList(gn_src_search_dirs))

    src_list_files = []
    if options.src_list_files:
        for gn_src_list_file in options.src_list_files:
            src_list_files.extend(build_utils.ParseGnList(gn_src_list_file))

    src_files = []
    for gn_src_files in options.src_files:
        src_files.extend(build_utils.ParseGnList(gn_src_files))

    # Add files from --src-list-files.
    for src_list_file in src_list_files:
        with open(src_list_file, 'r') as f:
            src_files.extend(f.read().splitlines())

    # Preprocess source files by removing any prefix that comes before
    # the Java package name.
    for i, s in enumerate(src_files):
        prefix_position = s.find(JAVA_PACKAGE_PREFIX)
        if prefix_position != -1:
            src_files[i] = s[prefix_position:]

    excluded_classes = []
    if options.excluded_classes:
        classes = build_utils.ParseGnList(options.excluded_classes)
        excluded_classes.extend(f.replace('.class', '.java') for f in classes)

    predicate = None
    if excluded_classes:
        predicate = lambda f: not build_utils.MatchesGlob(f, excluded_classes)

    # Create a dictionary that maps every source directory
    # to source files that it contains.
    dir_to_files_map = {}
    # Initialize the map.
    for src_search_dir in src_search_dirs:
        dir_to_files_map[src_search_dir] = []
    # Fill the map.
    for src_file in src_files:
        number_of_file_instances = 0
        for src_search_dir in src_search_dirs:
            target_path = os.path.join(src_search_dir, src_file)
            if os.path.isfile(target_path):
                number_of_file_instances += 1
                if not predicate or predicate(src_file):
                    dir_to_files_map[src_search_dir].append(target_path)
        if (number_of_file_instances > 1):
            raise Exception(
                'There is more than one instance of file %s in %s' %
                (src_file, src_search_dirs))
        if (number_of_file_instances < 1):
            raise Exception('Unable to find file %s in %s' %
                            (src_file, src_search_dirs))

    # Jar the sources from every source search directory.
    with build_utils.AtomicOutput(options.jar_path) as o, \
        zipfile.ZipFile(o, 'w', zipfile.ZIP_DEFLATED) as z:
        for src_search_dir in src_search_dirs:
            subpaths = dir_to_files_map[src_search_dir]
            if subpaths:
                build_utils.DoZip(subpaths, z, base_dir=src_search_dir)
            else:
                raise Exception(
                    'Directory %s does not contain any files and can be'
                    ' removed from the list of directories to search' %
                    src_search_dir)

        # Jar additional src jars
        if src_jars:
            build_utils.MergeZips(z, src_jars, compress=True)

    if options.depfile:
        deps = []
        for sources in dir_to_files_map.itervalues():
            deps.extend(sources)
        # Srcjar deps already captured in GN rules (no need to list them here).
        build_utils.WriteDepfile(options.depfile, options.jar_path, deps)
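
The build_utils.AtomicOutput context manager used above avoids leaving a half-written jar behind if the script is interrupted. A rough standard-library sketch of that write-to-a-temp-file-then-rename pattern (an assumption about the general technique, not the actual build_utils code) looks like this:

import os
import tempfile
import zipfile


def write_zip_atomically(output_path, paths_by_base_dir):
    # Write to a temporary file in the same directory, then rename it over
    # the final path; on POSIX filesystems the rename is atomic.
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(output_path) or '.')
    try:
        with os.fdopen(fd, 'wb') as f:
            with zipfile.ZipFile(f, 'w', zipfile.ZIP_DEFLATED) as z:
                for base_dir, paths in paths_by_base_dir.items():
                    for path in paths:
                        z.write(path, os.path.relpath(path, base_dir))
        os.rename(tmp_path, output_path)
    except Exception:
        os.unlink(tmp_path)
        raise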
Example #11
def main():
    parser = optparse.OptionParser()
    build_utils.AddDepfileOption(parser)
    parser.add_option('--src-search-dirs',
                      action="append",
                      help='A list of directories that should be searched'
                      ' for the source files.')
    parser.add_option('--src-files',
                      action="append",
                      help='A list of source files to jar.')
    parser.add_option(
        '--src-jars',
        action="append",
        help='A list of source jars to include in addition to source files.')
    parser.add_option('--src-list-files',
                      action="append",
                      help='A list of files that contain a list of sources,'
                      ' e.g. a list of \'.sources\' files generated by GN.')
    parser.add_option('--jar-path', help='Jar output path.')
    parser.add_option('--stamp', help='Path to touch on success.')

    options, _ = parser.parse_args()

    # A temporary directory for the unpacked source jars.
    unzipped_jar_path = None
    if options.src_jars:
        unzipped_jar_path = tempfile.mkdtemp(
            dir=os.path.dirname(options.jar_path))
        jar_list = []
        for gn_list in options.src_jars:
            jar_list.extend(build_utils.ParseGnList(gn_list))

        for jar in jar_list:
            UnzipSourceJar(jar, unzipped_jar_path)

    src_search_dirs = []
    for gn_src_search_dirs in options.src_search_dirs:
        src_search_dirs.extend(build_utils.ParseGnList(gn_src_search_dirs))

    src_list_files = []
    if options.src_list_files:
        for gn_src_list_file in options.src_list_files:
            src_list_files.extend(build_utils.ParseGnList(gn_src_list_file))

    src_files = []
    for gn_src_files in options.src_files:
        src_files.extend(build_utils.ParseGnList(gn_src_files))

    # Add files from --src-list-files.
    for src_list_file in src_list_files:
        with open(src_list_file, 'r') as f:
            src_files.extend(f.read().splitlines())

    # Preprocess source files by removing any prefix that comes before
    # the Java package name.
    for i, s in enumerate(src_files):
        prefix_position = s.find(JAVA_PACKAGE_PREFIX)
        if prefix_position != -1:
            src_files[i] = s[prefix_position:]

    # Create a dictionary that maps every source directory
    # to source files that it contains.
    dir_to_files_map = {}
    # Initialize the map.
    for src_search_dir in src_search_dirs:
        dir_to_files_map[src_search_dir] = []
    # Fill the map.
    for src_file in src_files:
        number_of_file_instances = 0
        for src_search_dir in src_search_dirs:
            if os.path.isfile(os.path.join(src_search_dir, src_file)):
                number_of_file_instances += 1
                dir_to_files_map[src_search_dir].append(src_file)
        if (number_of_file_instances > 1):
            raise Exception(
                'There is more than one instance of file %s in %s' %
                (src_file, src_search_dirs))
        if (number_of_file_instances < 1):
            raise Exception('Unable to find file %s in %s' %
                            (src_file, src_search_dirs))

    # Delete the old output file if any.
    if os.path.isfile(options.jar_path):
        os.remove(options.jar_path)

    # Jar the sources from every source search directory.
    for src_search_dir in src_search_dirs:
        if len(dir_to_files_map[src_search_dir]) > 0:
            JarSources(src_search_dir, dir_to_files_map[src_search_dir],
                       options.jar_path)
        else:
            raise Exception(
                'Directory %s does not contain any files and can be'
                ' removed from the list of directories to search' %
                src_search_dir)

    # Jar additional src jars
    if unzipped_jar_path:
        JarSources(unzipped_jar_path, ['.'], options.jar_path)

    if options.depfile:
        deps = []
        for src_dir in src_search_dirs:
            for root, _, filenames in os.walk(src_dir):
                deps.extend(os.path.join(root, f) for f in filenames)
        # Srcjar deps already captured in GN rules (no need to list them here).
        build_utils.WriteDepfile(options.depfile, options.jar_path, deps)

    # Clean up temporary output directory.
    if unzipped_jar_path:
        build_utils.DeleteDirectory(unzipped_jar_path)

    if options.stamp:
        build_utils.Touch(options.stamp)