def _populate_set_with_gn_flags(new_set, base_bundle, logger):
    """
    Adds items into set new_set from the base bundle's GN flags
    Entries that are not sorted are logged as warnings.
    Returns True if warnings were logged; False otherwise
    """
    warnings = False
    try:
        iterator = iter(base_bundle.gn_flags)
    except ValueError as exc:
        logger.error(str(exc))
        raise BuildkitAbort()
    try:
        previous = next(iterator)
    except StopIteration:
        return warnings
    # Add the first flag as well; the loop below starts from the second entry
    new_set.add('%s=%s' % (previous, base_bundle.gn_flags[previous]))
    for current in iterator:
        if current < previous:
            logger.warning(
                'In base bundle "%s" GN flags: "%s" should be sorted before "%s"',
                base_bundle.name, current, previous)
            warnings = True
        new_set.add('%s=%s' % (current, base_bundle.gn_flags[current]))
        previous = current
    return warnings
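# Illustrative usage sketch (not part of buildkit): exercising
# _populate_set_with_gn_flags with a stand-in bundle object. SimpleNamespace
# only mimics the .name and .gn_flags attributes the function reads; the flag
# names and values below are made-up examples.
def _example_populate_gn_flags(logger):
    import types
    fake_bundle = types.SimpleNamespace(
        name='example', gn_flags={'is_debug': 'false', 'enable_nacl': 'false'})
    flag_set = set()
    # Warns because 'enable_nacl' sorts before 'is_debug' in the mapping order
    had_warnings = _populate_set_with_gn_flags(flag_set, fake_bundle, logger)
    return flag_set, had_warnings  # {'is_debug=false', 'enable_nacl=false'}, True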
def compute_lists(source_tree, search_regex):
    """
    Compute the binary pruning and domain substitution lists of the source tree.
    Returns a tuple of two items in the following order:
    1. The sorted binary pruning list
    2. The sorted domain substitution list

    source_tree is a pathlib.Path to the source tree
    search_regex is a compiled regex object to search for domain names
    """
    pruning_set = set()
    domain_substitution_set = set()
    # POSIX resolved path -> set of POSIX symlink paths
    deferred_symlinks = dict()
    source_tree = source_tree.resolve()
    unused_patterns = UnusedPatterns()

    for path in source_tree.rglob('*'):
        if not path.is_file():
            # NOTE: Path.rglob() does not traverse symlink dirs; no need for special handling
            continue
        relative_path = path.relative_to(source_tree)
        if path.is_symlink():
            try:
                resolved_relative_posix = path.resolve().relative_to(
                    source_tree).as_posix()
            except ValueError:
                # Symlink leads out of the source tree
                continue
            if resolved_relative_posix in pruning_set:
                pruning_set.add(relative_path.as_posix())
            else:
                deferred_symlinks.setdefault(resolved_relative_posix,
                                             set()).add(relative_path.as_posix())
            # Path has finished processing because...
            # Pruning: either symlink has been added or removal determination has been deferred
            # Domain substitution: Only the real paths can be added, not symlinks
            continue
        try:
            if should_prune(path, relative_path, unused_patterns):
                relative_posix_path = relative_path.as_posix()
                pruning_set.add(relative_posix_path)
                symlink_set = deferred_symlinks.pop(relative_posix_path,
                                                    tuple())
                if symlink_set:
                    pruning_set.update(symlink_set)
            elif should_domain_substitute(path, relative_path, search_regex,
                                          unused_patterns):
                domain_substitution_set.add(relative_path.as_posix())
        except BaseException:
            get_logger().exception('Unhandled exception while processing %s',
                                   relative_path)
            raise BuildkitAbort()
    return sorted(pruning_set), sorted(
        domain_substitution_set), unused_patterns
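# Illustrative usage sketch (not part of buildkit): calling compute_lists
# directly on an already-unpacked source tree. The path and domain pattern
# below are assumptions for demonstration only.
def _example_compute_lists():
    import re
    search_regex = re.compile(r'google\.com|gstatic\.com')
    pruning_list, domain_substitution_list, unused_patterns = compute_lists(
        Path('/path/to/unpacked/source/tree'), search_regex)
    return pruning_list, domain_substitution_list, unused_patterns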
def main(args_list=None):
    """CLI entrypoint"""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-a', '--auto-download', action='store_true',
        help='If specified, it will download the source code and dependencies '
             'for the --base-bundle given. Otherwise, only an existing '
             'buildspace tree will be used.')
    parser.add_argument(
        '-b', '--base-bundle', metavar='NAME', type=get_basebundle_verbosely,
        default='common', help='The base bundle to use. Default: %(default)s')
    parser.add_argument(
        '-p', '--pruning', metavar='PATH', type=Path,
        default='resources/config_bundles/common/pruning.list',
        help='The path to store pruning.list. Default: %(default)s')
    parser.add_argument(
        '-d', '--domain-substitution', metavar='PATH', type=Path,
        default='resources/config_bundles/common/domain_substitution.list',
        help='The path to store domain_substitution.list. Default: %(default)s')
    parser.add_argument(
        '--tree', metavar='PATH', type=Path, default=BUILDSPACE_TREE,
        help=('The path to the buildspace tree to create. '
              'If it is not empty, the source will not be unpacked. '
              'Default: %(default)s'))
    parser.add_argument(
        '--downloads', metavar='PATH', type=Path, default=BUILDSPACE_DOWNLOADS,
        help=('The path to the buildspace downloads directory. '
              'It must already exist. Default: %(default)s'))
    try:
        args = parser.parse_args(args_list)
        if args.tree.exists() and not dir_empty(args.tree):
            get_logger().info('Using existing buildspace tree at %s', args.tree)
        elif args.auto_download:
            source_retrieval.retrieve_and_extract(
                args.base_bundle, args.downloads, args.tree, prune_binaries=False)
        else:
            get_logger().error('No buildspace tree found and --auto-download '
                               'is not specified. Aborting.')
            raise BuildkitAbort()
        get_logger().info('Computing lists...')
        pruning_list, domain_substitution_list, _unused_patterns = compute_lists(
            args.tree, args.base_bundle.domain_regex.search_regex)
    except BuildkitAbort:
        exit(1)
    with args.pruning.open('w', encoding=ENCODING) as file_obj:
        file_obj.writelines('%s\n' % line for line in pruning_list)
    with args.domain_substitution.open('w', encoding=ENCODING) as file_obj:
        file_obj.writelines('%s\n' % line for line in domain_substitution_list)
def _get_current_commit():
    """
    Returns a string of the current commit hash.

    It assumes "git" is in PATH, and that buildkit is run within a git repository.

    Raises BuildkitAbort if invoking git fails.
    """
    result = subprocess.run(['git', 'rev-parse', '--verify', 'HEAD'],
                            stdout=subprocess.PIPE,
                            universal_newlines=True,
                            cwd=str(Path(__file__).resolve().parent))
    if result.returncode:
        get_logger().error('Unexpected return code %s', result.returncode)
        get_logger().error('Command output: %s', result.stdout)
        raise BuildkitAbort()
    return result.stdout.strip('\n')
def _merge_disjoints(pair_iterable, current_name, logger):
    """
    Merges disjoint sets with errors
    pair_iterable is an iterable of tuples (display_name, current_set, dependency_set, as_error)

    Returns True if warnings occured; False otherwise
    Raises BuildkitAbort if an error occurs
    """
    warnings = False
    for display_name, current_set, dependency_set, as_error in pair_iterable:
        if current_set.isdisjoint(dependency_set):
            current_set.update(dependency_set)
        else:
            if as_error:
                log_func = logger.error
            else:
                log_func = logger.warning
            log_func('%s of "%s" appear at least twice: %s', display_name,
                     current_name, current_set.intersection(dependency_set))
            if as_error:
                raise BuildkitAbort()
            warnings = True
    return warnings
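# Illustrative usage sketch (not part of buildkit): folding a dependency's GN
# flags into the current bundle's set via _merge_disjoints. The flag strings
# are made-up examples.
def _example_merge_gn_flags(logger):
    current_flags = {'enable_nacl=false'}
    dependency_flags = {'is_official_build=true'}
    # Disjoint sets: dependency_flags is merged into current_flags, no warning
    had_warnings = _merge_disjoints(
        (('GN flags', current_flags, dependency_flags, False),),
        'example_bundle', logger)
    return current_flags, had_warnings  # merged set, False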
def _get_current_commit_or_tag():
    """
    Returns a string of the current commit hash, or the tag name based
    on version.ini if the script is not in a git repo.

    It assumes "git" is in PATH, and that buildkit is run within a git repository.

    Raises BuildkitAbort if invoking git fails.
    """
    # Use presence of .git directory to determine if using git or not
    # since git will be aggressive in finding a git repository otherwise
    if not (Path(__file__).parent / '.git').exists(): #pylint: disable=no-member
        # Probably not in a git checkout; extrapolate tag name based on
        # version.ini
        return '{}-{}'.format(get_chromium_version(), get_release_revision())
    result = subprocess.run(['git', 'rev-parse', '--verify', 'HEAD'],
                            stdout=subprocess.PIPE,
                            universal_newlines=True,
                            cwd=str(Path(__file__).resolve().parent))
    if result.returncode:
        get_logger().error('Unexpected return code %s', result.returncode)
        get_logger().error('Command output: %s', result.stdout)
        raise BuildkitAbort()
    return result.stdout.strip('\n')
def main(args_list=None):
    """CLI entrypoint"""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-a',
        '--auto-download',
        action='store_true',
        help='If specified, it will download the source code and dependencies '
        'for the --bundle given. Otherwise, only an existing '
        'source tree will be used.')
    parser.add_argument('-b',
                        '--bundle',
                        metavar='PATH',
                        type=Path,
                        default='config_bundles/common',
                        help='The bundle to use. Default: %(default)s')
    parser.add_argument(
        '--pruning',
        metavar='PATH',
        type=Path,
        default='config_bundles/common/pruning.list',
        help='The path to store pruning.list. Default: %(default)s')
    parser.add_argument(
        '--domain-substitution',
        metavar='PATH',
        type=Path,
        default='config_bundles/common/domain_substitution.list',
        help='The path to store domain_substitution.list. Default: %(default)s'
    )
    parser.add_argument(
        '-t',
        '--tree',
        metavar='PATH',
        type=Path,
        required=True,
        help=('The path to the source tree to create. '
              'If it is not empty, the source will not be unpacked.'))
    parser.add_argument('-c',
                        '--cache',
                        metavar='PATH',
                        type=Path,
                        help='The path to the downloads cache.')
    try:
        args = parser.parse_args(args_list)
        try:
            bundle = ConfigBundle(args.bundle)
        except BaseException:
            get_logger().exception('Error loading config bundle')
            raise BuildkitAbort()
        if args.tree.exists() and not dir_empty(args.tree):
            get_logger().info('Using existing source tree at %s', args.tree)
        elif args.auto_download:
            if not args.cache:
                get_logger().error('--cache is required with --auto-download')
                raise BuildkitAbort()
            downloads.retrieve_downloads(bundle, args.cache, True)
            downloads.check_downloads(bundle, args.cache)
            downloads.unpack_downloads(bundle, args.cache, args.tree)
        else:
            get_logger().error('No source tree found and --auto-download '
                               'is not specified. Aborting.')
            raise BuildkitAbort()
        get_logger().info('Computing lists...')
        pruning_list, domain_substitution_list, _unused_patterns = compute_lists(
            args.tree, bundle.domain_regex.search_regex)
    except BuildkitAbort:
        exit(1)
    with args.pruning.open('w', encoding=ENCODING) as file_obj:
        file_obj.writelines('%s\n' % line for line in pruning_list)
    with args.domain_substitution.open('w', encoding=ENCODING) as file_obj:
        file_obj.writelines('%s\n' % line for line in domain_substitution_list)
def _explore_base_bundle(current_name, journal, logger):
    """
    Explore the base bundle given by current_name. Modifies journal
    Returns True if warnings occured, False otherwise.
    Raises BuildkitAbort if fatal errors occured.
    """
    warnings = False

    if current_name in journal.results:
        # Node has been explored iff its results are stored
        return warnings

    # Indicate start of node exploration
    try:
        journal.unexplored_set.remove(current_name)
    except KeyError:
        # Exploration has begun but there are no results, so it still must be processing
        # its dependencies
        logger.error('Dependencies of "%s" are cyclical', current_name)
        raise BuildkitAbort()

    current_base_bundle = ConfigBundle.from_base_name(current_name,
                                                      load_depends=False)
    current_meta = BaseBundleMetaIni(current_base_bundle.path /
                                     BASEBUNDLEMETA_INI)

    # Populate current base bundle's data
    current_results = BaseBundleResult(leaves=set(),
                                       gn_flags=set(),
                                       patches=set())
    warnings = _populate_set_with_gn_flags(
        current_results.gn_flags, current_base_bundle, logger) or warnings
    warnings = _populate_set_with_patches(
        current_results.patches, journal.unused_patches, current_base_bundle,
        logger) or warnings
    warnings = _check_patches(current_base_bundle, logger) or warnings

    # Set an empty set just in case this node has no dependents
    if current_name not in journal.dependents:
        journal.dependents[current_name] = set()

    for dependency_name in current_meta.depends:
        # Update dependents
        if dependency_name not in journal.dependents:
            journal.dependents[dependency_name] = set()
        journal.dependents[dependency_name].add(current_name)

        # Explore dependencies
        warnings = _explore_base_bundle(dependency_name, journal,
                                        logger) or warnings

        # Merge sets of dependencies with the current
        warnings = _merge_disjoints((
            ('Patches', current_results.patches,
             journal.results[dependency_name].patches, False),
            ('GN flags', current_results.gn_flags,
             journal.results[dependency_name].gn_flags, False),
            ('Dependencies', current_results.leaves,
             journal.results[dependency_name].leaves, True),
        ), current_name, logger) or warnings
    if not current_results.leaves:
        # This node is a leaf node
        current_results.leaves.add(current_name)

    # Store results last to indicate it has been successfully explored
    journal.results[current_name] = current_results

    return warnings
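# Illustrative usage sketch (assumes a buildkit checkout with base bundles on
# disk): driving _explore_base_bundle from a root bundle name. SimpleNamespace
# stands in for the journal structure the function expects;
# all_base_bundle_names is a caller-supplied assumption listing every known
# base bundle, since unexplored_set must be pre-seeded with all of them.
def _example_explore(root_name, all_base_bundle_names, logger):
    import types
    journal = types.SimpleNamespace(
        results=dict(),  # bundle name -> BaseBundleResult
        dependents=dict(),  # bundle name -> set of dependent bundle names
        unexplored_set=set(all_base_bundle_names),
        # Set of patch paths; how it should be seeded depends on
        # _populate_set_with_patches (not shown here)
        unused_patches=set())
    had_warnings = _explore_base_bundle(root_name, journal, logger)
    return journal.results, had_warnings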