Example no. 1
def main(args_list=None):
    """CLI entrypoint"""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-b', '--base-bundle', metavar='NAME', action=NewBaseBundleAction,
                        required=True, help='The base bundle to use')
    parser.add_argument('-p', '--pruning', metavar='PATH', type=Path, required=True,
                        help='The path to store pruning.list')
    parser.add_argument('-d', '--domain-substitution', metavar='PATH', type=Path, required=True,
                        help='The path to store domain_substitution.list')
    parser.add_argument('--tree', metavar='PATH', type=Path, default=BUILDSPACE_TREE,
                        help=('The path to the buildspace tree to create. '
                              'If it is not empty, the source will not be unpacked. '
                              'Default: %s') % BUILDSPACE_TREE)
    parser.add_argument('--downloads', metavar='PATH', type=Path, default=BUILDSPACE_DOWNLOADS,
                        help=('The path to the buildspace downloads directory. '
                              'It must already exist. Default: %s') % BUILDSPACE_DOWNLOADS)
    args = parser.parse_args(args_list)

    try:
        if args.tree.exists() and not dir_empty(args.tree):
            get_logger().info('Using existing buildspace tree at %s', args.tree)
        else:
            source_retrieval.retrieve_and_extract(
                args.base_bundle, args.downloads, args.tree, prune_binaries=False)
        get_logger().info('Computing lists...')
        pruning_list, domain_substitution_list = compute_lists(
            args.tree, args.base_bundle.domain_regex.search_regex)
    except BuildkitAbort:
        exit(1)
    with args.pruning.open('w', encoding=ENCODING) as file_obj:
        file_obj.writelines('%s\n' % line for line in pruning_list)
    with args.domain_substitution.open('w', encoding=ENCODING) as file_obj:
        file_obj.writelines('%s\n' % line for line in domain_substitution_list)
Example no. 2
def main(arg_list=None):
    """CLI entrypoint"""
    parser = argparse.ArgumentParser(description=__doc__)
    config_group = parser.add_mutually_exclusive_group(required=True)
    config_group.add_argument(
        '-b',
        '--base-bundle',
        metavar='NAME',
        dest='bundle',
        action=NewBaseBundleAction,
        help=('The base config bundle name to use (located in resources/config_bundles). '
              'Mutually exclusive with --user-bundle.'))
    config_group.add_argument(
        '-u',
        '--user-bundle',
        metavar='PATH',
        dest='bundle',
        type=lambda x: ConfigBundle(Path(x)),
        help=('The path to a user bundle to use. '
              'Mutually exclusive with --base-bundle.'))
    args = parser.parse_args(args=arg_list)

    try:
        domain_substitution.process_bundle_patches(args.bundle, invert=True)
    except ValueError:
        get_logger().exception('A regex pair is not invertible')
        parser.exit(status=1)
Example no. 3
def _get_gitiles_commit_before_date(repo_url, target_branch, target_datetime):
    """Returns the hexadecimal hash of the closest commit before target_datetime"""
    json_log_url = '{repo}/+log/{branch}?format=JSON'.format(
        repo=repo_url, branch=target_branch)
    response = requests.get(json_log_url)
    response.raise_for_status()
    git_log = json.loads(
        response.text[5:])  # Trim closing delimiters for various structures
    assert len(git_log) == 2  # 'log' and 'next' entries
    assert 'log' in git_log
    assert git_log['log']
    git_log = git_log['log']
    # Check boundary conditions
    if _get_gitiles_git_log_date(git_log[0]) < target_datetime:
        # Newest commit is older than target datetime
        return git_log[0]['commit']
    if _get_gitiles_git_log_date(git_log[-1]) > target_datetime:
        # Oldest commit is newer than the target datetime; assume oldest is close enough.
        get_logger().warning(
            'Oldest entry in gitiles log for repo "%s" is newer than target; '
            'continuing with oldest entry...', repo_url)
        return git_log[-1]['commit']
    # Do binary search
    low_index = 0
    high_index = len(git_log) - 1
    while low_index != high_index:
        mid_index = low_index + (high_index - low_index) // 2
        if _get_gitiles_git_log_date(git_log[mid_index]) > target_datetime:
            # Middle entry is newer than the target; search the older half
            low_index = mid_index + 1
        else:
            high_index = mid_index
    return git_log[low_index]['commit']
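The gitiles JSON log is ordered newest-first, so the loop above is a bisection for the first entry whose date is at or before the target. A minimal, self-contained sketch of the same convergence logic, using a toy log and a stand-in for _get_gitiles_git_log_date (both are illustrative assumptions, not the real gitiles structures):

from datetime import datetime

def _toy_log_date(entry):
    """Stand-in for _get_gitiles_git_log_date in this sketch"""
    return entry['time']

def closest_commit_before(git_log, target_datetime):
    """Same bisection as the main loop of _get_gitiles_commit_before_date"""
    low_index = 0
    high_index = len(git_log) - 1
    while low_index != high_index:
        mid_index = low_index + (high_index - low_index) // 2
        if _toy_log_date(git_log[mid_index]) > target_datetime:
            low_index = mid_index + 1  # Entry is too new; look at older entries
        else:
            high_index = mid_index
    return git_log[low_index]['commit']

# Toy newest-first log; 'time' is a simplified field used only in this sketch
toy_log = [
    {'commit': 'c4', 'time': datetime(2018, 4, 4)},
    {'commit': 'c3', 'time': datetime(2018, 3, 3)},
    {'commit': 'c2', 'time': datetime(2018, 2, 2)},
    {'commit': 'c1', 'time': datetime(2018, 1, 1)},
]
assert closest_commit_before(toy_log, datetime(2018, 3, 15)) == 'c3'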
Example no. 4
def compute_lists(source_tree, search_regex):
    """
    Compute the binary pruning and domain substitution lists of the source tree.
    Returns a tuple of three items in the following order:
    1. The sorted binary pruning list
    2. The sorted domain substitution list
    3. The UnusedPatterns instance recording patterns that never matched

    source_tree is a pathlib.Path to the source tree
    search_regex is a compiled regex object to search for domain names
    """
    pruning_set = set()
    domain_substitution_set = set()
    # POSIX resolved path -> set of POSIX symlink paths
    deferred_symlinks = dict()
    source_tree = source_tree.resolve()
    unused_patterns = UnusedPatterns()

    for path in source_tree.rglob('*'):
        if not path.is_file():
            # NOTE: Path.rglob() does not traverse symlink dirs; no need for special handling
            continue
        relative_path = path.relative_to(source_tree)
        if path.is_symlink():
            try:
                resolved_relative_posix = path.resolve().relative_to(
                    source_tree).as_posix()
            except ValueError:
                # Symlink leads out of the source tree
                continue
            if resolved_relative_posix in pruning_set:
                pruning_set.add(relative_path.as_posix())
            else:
                symlink_set = deferred_symlinks.get(resolved_relative_posix,
                                                    None)
                if symlink_set is None:
                    symlink_set = set()
                    deferred_symlinks[resolved_relative_posix] = symlink_set
                symlink_set.add(relative_path.as_posix())
            # Path has finished processing because...
            # Pruning: either symlink has been added or removal determination has been deferred
            # Domain substitution: Only the real paths can be added, not symlinks
            continue
        try:
            if should_prune(path, relative_path, unused_patterns):
                relative_posix_path = relative_path.as_posix()
                pruning_set.add(relative_posix_path)
                symlink_set = deferred_symlinks.pop(relative_posix_path,
                                                    tuple())
                if symlink_set:
                    pruning_set.update(symlink_set)
            elif should_domain_substitute(path, relative_path, search_regex,
                                          unused_patterns):
                domain_substitution_set.add(relative_path.as_posix())
        except BaseException:
            get_logger().exception('Unhandled exception while processing %s',
                                   relative_path)
            raise BuildkitAbort()
    return sorted(pruning_set), sorted(
        domain_substitution_set), unused_patterns
Example no. 5
def _validate_deps(deps_text):
    """Returns True if the DEPS file passes validation; False otherwise"""
    try:
        _DepsNodeVisitor().visit(ast.parse(deps_text))
    except _UnexpectedSyntaxError as exc:
        get_logger().error('%s', exc)
        return False
    return True
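_DepsNodeVisitor is not shown in this excerpt; as a rough sketch of the whitelisting idea behind it, an ast.NodeVisitor subclass can reject any syntax node outside an allowed set (the node list below is illustrative only, not the actual DEPS whitelist):

import ast

class _UnexpectedSyntaxError(ValueError):
    """Raised when a disallowed syntax node is encountered"""

class _WhitelistNodeVisitor(ast.NodeVisitor):
    # Illustrative whitelist; a real DEPS validator would allow the full DEPS subset of Python
    _allowed_nodes = (ast.Module, ast.Assign, ast.Name, ast.Store, ast.Load,
                      ast.Dict, ast.List, ast.Constant)

    def generic_visit(self, node):
        if not isinstance(node, self._allowed_nodes):
            raise _UnexpectedSyntaxError('Unexpected node type: {}'.format(type(node).__name__))
        super().generic_visit(node)

_WhitelistNodeVisitor().visit(ast.parse("deps = {'src/foo': 'https://example.com/foo.git'}"))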
Example no. 6
    def sleep_for_retry(self, response=None):
        """Sleeps for Retry-After, and logs the sleep time"""
        if response:
            retry_after = self.get_retry_after(response)
            if retry_after:
                get_logger().info(
                    'Got HTTP status %s with Retry-After header. Retrying after %s seconds...',
                    response.status, retry_after)
            else:
                get_logger().info(
                    'Could not find Retry-After header for HTTP response %s. Status reason: %s',
                    response.status, response.reason)
        return super().sleep_for_retry(response)
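sleep_for_retry here overrides a method of urllib3.util.retry.Retry. A minimal sketch of how such a subclass could be mounted on a requests.Session (the class name and retry parameters below are assumptions, not the project's actual configuration):

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

class LoggingRetry(Retry):
    """Hypothetical Retry subclass that would carry overrides like sleep_for_retry above"""

def make_retrying_session():
    # Retry transient HTTP errors, honoring Retry-After headers where present
    retry = LoggingRetry(total=5, backoff_factor=1, status_forcelist=(429, 500, 502, 503, 504))
    session = requests.Session()
    session.mount('https://', HTTPAdapter(max_retries=retry))
    session.mount('http://', HTTPAdapter(max_retries=retry))
    return session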
Example no. 7
def _download_googlesource_file(download_session, repo_url, version, relative_path):
    """
    Returns the contents of the text file with path within the given
    googlesource.com repo as a string.
    """
    if 'googlesource.com' not in repo_url:
        raise ValueError('Repository URL is not a googlesource.com URL: {}'.format(repo_url))
    full_url = repo_url + '/+/{}/{}?format=TEXT'.format(version, str(relative_path))
    get_logger(prepend_timestamp=False, log_init=False).debug('Downloading: %s', full_url)
    response = download_session.get(full_url)
    response.raise_for_status()
    # Assume all files that need patching are compatible with UTF-8
    return base64.b64decode(response.text, validate=True).decode('UTF-8')
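Gitiles serves file contents base64-encoded when ?format=TEXT is requested, which is why the helper decodes before returning. A hedged usage sketch (the repository, ref, and path below are illustrative values only):

import requests

with requests.Session() as session:
    deps_text = _download_googlesource_file(
        session,
        repo_url='https://chromium.googlesource.com/chromium/src',
        version='main',
        relative_path='DEPS')
    print(deps_text.splitlines()[0])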
Example no. 8
    def log_unused(self):
        """
        Logs unused patterns and prefixes

        Returns True if there are unused patterns or prefixes; False otherwise
        """
        have_unused = False
        for name in self._all_names:
            current_set = getattr(self, name, None)
            if current_set:
                get_logger().error('Unused from %s: %s', name.upper(), current_set)
                have_unused = True
        return have_unused
Example no. 9
def _retrieve_remote_files(file_iter):
    """
    Retrieves all file paths in file_iter from Google

    file_iter is an iterable of strings that are relative UNIX paths to
        files in the Chromium source.

    Returns a dict of relative UNIX path strings to a list of lines in the file as strings
    """

    files = dict()

    root_deps_tree = _initialize_deps_tree()

    try:
        total_files = len(file_iter)
    except TypeError:
        total_files = None

    logger = get_logger()
    if total_files is None:
        logger.info('Downloading remote files...')
    else:
        logger.info('Downloading %d remote files...', total_files)
    last_progress = 0
    file_count = 0
    fallback_repo_manager = _FallbackRepoManager()
    with requests.Session() as download_session:
        download_session.stream = False  # To ensure connection to Google can be reused
        for file_path in file_iter:
            if total_files:
                file_count += 1
                current_progress = file_count * 100 // total_files // 5 * 5
                if current_progress != last_progress:
                    last_progress = current_progress
                    logger.info('%d%% downloaded', current_progress)
            else:
                current_progress = file_count // 20 * 20
                if current_progress != last_progress:
                    last_progress = current_progress
                    logger.info('%d files downloaded', current_progress)
            try:
                files[file_path] = _download_source_file(
                    download_session, root_deps_tree, fallback_repo_manager,
                    file_path).split('\n')
            except _NotInRepoError:
                get_logger().warning(
                    'Could not find "%s" remotely. Skipping...', file_path)
    return files
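The progress reporting above rounds down to 5% steps (or to 20-file steps when the total is unknown) so each milestone is logged at most once. A tiny illustration of the integer arithmetic:

def five_percent_step(file_count, total_files):
    """Progress rounded down to the nearest 5%, as computed in the download loop"""
    return file_count * 100 // total_files // 5 * 5

assert five_percent_step(7, 100) == 5
assert five_percent_step(104, 1000) == 10
assert five_percent_step(1000, 1000) == 100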
Example no. 10
def _get_current_commit():
    """
    Returns a string of the current commit hash.

    It assumes "git" is in PATH, and that buildkit is run within a git repository.

    Raises BuildkitAbort if invoking git fails.
    """
    result = subprocess.run(['git', 'rev-parse', '--verify', 'HEAD'],
                            stdout=subprocess.PIPE,
                            universal_newlines=True,
                            cwd=str(Path(__file__).resolve().parent))
    if result.returncode:
        get_logger().error('Unexpected return code %s', result.returncode)
        get_logger().error('Command output: %s', result.stdout)
        raise BuildkitAbort()
    return result.stdout.strip('\n')
Example no. 11
def _retrieve_local_files(file_iter, source_dir):
    """
    Retrieves all file paths in file_iter from the local source tree

    file_iter is an iterable of strings that are relative UNIX paths to
        files in the Chromium source.

    Returns a dict of relative UNIX path strings to a list of lines in the file as strings
    """
    files = dict()
    for file_path in file_iter:
        try:
            files[file_path] = (source_dir / file_path).read_text().split('\n')
        except FileNotFoundError:
            get_logger().warning('Missing file from patches: %s', file_path)
    if not files:
        get_logger().error('All files used by patches are missing!')
    return files
Example no. 12
    def get_fallback(self, current_relative_path, current_node, root_deps_tree):
        """
        Helper for _download_source_file

        It returns a new (repo_url, version, new_relative_path) to attempt a file download with
        """
        assert len(current_node) == 3
        # GN special processing
        try:
            new_relative_path = current_relative_path.relative_to('tools/gn')
        except ValueError:
            pass
        else:
            if current_node is root_deps_tree[Path('src')]:
                get_logger().info('Redirecting to GN repo version %s for path: %s', self.gn_version,
                                  current_relative_path)
                return (self._GN_REPO_URL, self.gn_version, new_relative_path)
        return None, None, None
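The try/except ValueError around relative_to() is effectively a path-prefix test: pathlib raises ValueError when the path does not start with the given prefix. A minimal illustration:

from pathlib import PurePosixPath

path = PurePosixPath('tools/gn/bootstrap/bootstrap.py')
try:
    print(path.relative_to('tools/gn'))  # bootstrap/bootstrap.py
except ValueError:
    print('not under tools/gn')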
Example no. 13
def _download_source_file(download_session, root_deps_tree, fallback_repo_manager, target_file):
    """
    Downloads the source tree file from googlesource.com

    download_session is an active requests.Session() object
    root_deps_tree is the DEPS tree from _initialize_deps_tree()
    fallback_repo_manager is a _FallbackRepoManager instance used when the file
        cannot be found via the DEPS tree
    target_file is a relative UNIX path string to the file in the source tree
    """
    current_node, current_relative_path = _get_target_file_deps_node(download_session,
                                                                     root_deps_tree, target_file)
    # Attempt download with potential fallback logic
    repo_url, version, _ = current_node
    try:
        # Download with DEPS-provided repo
        return _download_googlesource_file(download_session, repo_url, version,
                                           current_relative_path)
    except _NotInRepoError:
        pass
    get_logger().debug(
        'Path "%s" (relative: "%s") not found using DEPS tree; finding fallback repo...',
        target_file, current_relative_path)
    repo_url, version, current_relative_path = fallback_repo_manager.get_fallback(
        current_relative_path, current_node, root_deps_tree)
    if not repo_url:
        get_logger().error('No fallback repo found for "%s" (relative: "%s")', target_file,
                           current_relative_path)
        raise _NotInRepoError()
    try:
        # Download with fallback repo
        return _download_googlesource_file(download_session, repo_url, version,
                                           current_relative_path)
    except _NotInRepoError:
        pass
    get_logger().error('File "%s" (relative: "%s") not found in fallback repo "%s", version "%s"',
                       target_file, current_relative_path, repo_url, version)
    raise _NotInRepoError()
Example no. 14
def _test_patches(patch_trie, bundle_cache, patch_cache, orig_files):
    """
    Tests the patches with DFS in the trie of config bundles

    Returns a boolean indicating if any of the patches have failed
    """
    # Stack of iterables over each node's children
    # First, insert iterable over root node's children
    node_iter_stack = [iter(patch_trie.items())]
    # Stack of files at each node differing from the parent
    # The root node thus contains all the files to be patched
    file_layers = collections.ChainMap(orig_files)
    # Whether any branch had failed validation
    had_failure = False
    while node_iter_stack:
        try:
            child_path, grandchildren = next(node_iter_stack[-1])
        except StopIteration:
            # Finished exploring all children of this node
            node_iter_stack.pop()
            del file_layers.maps[0]
            continue
        # Add storage for child's patched files
        file_layers = file_layers.new_child()
        # Apply children's patches
        get_logger().info('Verifying at depth %s: %s ...',
                          len(node_iter_stack), child_path.name)

        # Potential optimization: Use interval tree data structure instead of copying
        # the entire array to track only diffs

        had_failure, branch_validation_failed = _apply_child_bundle_patches(
            child_path, had_failure, file_layers, patch_cache, bundle_cache)
        if branch_validation_failed:
            # Add blank children to force stack to move onto the next branch
            node_iter_stack.append(iter(tuple()))
        else:
            # Explore this child's children
            node_iter_stack.append(iter(grandchildren.items()))
    return had_failure
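The DFS keeps one ChainMap layer of patched files per depth in the patch trie: new_child() pushes a scratch layer before a child's patches are applied, writes land in maps[0], and deleting maps[0] on backtrack restores the parent's view. A standalone sketch of that layering with toy data (not the real patch structures):

import collections

orig_files = {'README': ['original line']}
file_layers = collections.ChainMap(orig_files)

# Descend into a child bundle: push a writable layer on top
file_layers = file_layers.new_child()
file_layers.maps[0]['README'] = ['patched line']
assert file_layers['README'] == ['patched line']  # The child sees its patched copy

# Backtrack: discard the child's layer; the parent state is untouched
del file_layers.maps[0]
assert file_layers['README'] == ['original line']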
Example no. 15
def _apply_child_bundle_patches(child_path, had_failure, file_layers,
                                patch_cache, bundle_cache):
    """Helper for _test_patches"""
    # Whether the current patch trie branch failed validation
    branch_validation_failed = False

    assert child_path in bundle_cache
    try:
        child_patch_order = bundle_cache[child_path].patch_order
    except KeyError:
        # No patches in the bundle
        pass
    else:
        patches_outdated = bundle_cache[child_path].bundlemeta.patches_outdated
        for patch_path_str in child_patch_order:
            for patched_file in patch_cache[patch_path_str]:
                try:
                    _apply_file_unidiff(patched_file, file_layers.maps[0],
                                        file_layers.parents)
                except _PatchValidationError as exc:
                    # Branch failed validation; abort
                    get_logger().error(
                        "Validation failure for file '%s' from patch '%s': %s",
                        patched_file.path, patch_path_str, str(exc))
                    branch_validation_failed = True
                    had_failure = had_failure or not patches_outdated
                    break
                except BaseException:
                    # Branch failed validation; abort
                    get_logger().exception(
                        "Error processing file '%s' from patch '%s'",
                        patched_file.path, patch_path_str)
                    branch_validation_failed = True
                    had_failure = had_failure or not patches_outdated
                    break
            if branch_validation_failed:
                if patches_outdated:
                    get_logger().warning(
                        '%s is marked with outdated patches. Ignoring failure...',
                        child_path.name)
                break
        if branch_validation_failed != patches_outdated:
            # Metadata for patch validity is out-of-date
            get_logger().error(
                '%s patch validity is inconsistent with patches_outdated marking in bundlemeta',
                child_path.name)
            had_failure = True
    return had_failure, branch_validation_failed
Example no. 16
def main():
    """CLI Entrypoint"""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-b',
        '--bundle',
        action='append',
        type=Path,
        metavar='DIRECTORY',
        help=('Verify patches for a config bundle. Specify multiple times to '
              'verify multiple bundles. Without specifying, all bundles will be verified.'))
    parser.add_argument(
        '-v', '--verbose', action='store_true', help='Log more information to stdout/stderr')
    file_source_group = parser.add_mutually_exclusive_group(required=True)
    file_source_group.add_argument(
        '-l', '--local', type=Path, metavar='DIRECTORY', help='Use a local source tree')
    file_source_group.add_argument(
        '-r',
        '--remote',
        action='store_true',
        help='Download the required source tree files from Google')
    file_source_group.add_argument(
        '-c',
        '--cache-remote',
        type=Path,
        metavar='DIRECTORY',
        help='(For debugging) Store the required remote files in an empty local directory')
    args = parser.parse_args()
    if args.cache_remote and not args.cache_remote.exists():
        if args.cache_remote.parent.exists():
            args.cache_remote.mkdir()
        else:
            parser.error('Parent of cache path {} does not exist'.format(args.cache_remote))

    if args.verbose:
        get_logger(initial_level=logging.DEBUG, prepend_timestamp=False, log_init=False)
    else:
        get_logger(initial_level=logging.INFO, prepend_timestamp=False, log_init=False)

    if args.bundle:
        for bundle_path in args.bundle:
            if not bundle_path.exists():
                parser.error('Could not find config bundle at: {}'.format(bundle_path))

    # Path to bundle -> ConfigBundle without dependencies
    bundle_cache = dict(
        map(lambda x: (x, ConfigBundle(x, load_depends=False)), _CONFIG_BUNDLES_PATH.iterdir()))
    patch_trie = _get_patch_trie(bundle_cache, args.bundle)
    patch_cache = _load_all_patches(bundle_cache.values())
    required_files = _get_required_files(patch_cache)
    orig_files = _get_orig_files(args, required_files, parser)
    had_failure = _test_patches(patch_trie, bundle_cache, patch_cache, orig_files)
    if had_failure:
        if not args.verbose:
            get_logger().info('(For more error details, re-run with the "-v" flag)')
        parser.exit(status=1)
Example no. 17
def _get_current_commit_or_tag():
    """
    Returns a string of the current commit hash, or the tag name based
    on version.ini if the script is not in a git repo.

    It assumes "git" is in PATH when run within a git repository.

    Raises BuildkitAbort if invoking git fails.
    """
    # Use presence of .git directory to determine if using git or not
    # since git will be aggressive in finding a git repository otherwise
    if not (Path(__file__).parent / '.git').exists(): #pylint: disable=no-member
        # Probably not in a git checkout; extrapolate tag name based on
        # version.ini
        return '{}-{}'.format(get_chromium_version(), get_release_revision())
    result = subprocess.run(['git', 'rev-parse', '--verify', 'HEAD'],
                            stdout=subprocess.PIPE,
                            universal_newlines=True,
                            cwd=str(Path(__file__).resolve().parent))
    if result.returncode:
        get_logger().error('Unexpected return code %s', result.returncode)
        get_logger().error('Command output: %s', result.stdout)
        raise BuildkitAbort()
    return result.stdout.strip('\n')
Example no. 18
def _retrieve_remote_files(file_iter):
    """
    Retrieves all file paths in file_iter from Google

    file_iter is an iterable of strings that are relative UNIX paths to
        files in the Chromium source.

    Returns a dict of relative UNIX path strings to a list of lines in the file as strings
    """

    # Load requests here so it isn't a dependency for local file reading
    import requests

    files = dict()

    deps_tree = _initialize_deps_tree()

    try:
        total_files = len(file_iter)
    except TypeError:
        total_files = None

    logger = get_logger()
    if total_files is None:
        logger.info('Downloading remote files...')
    else:
        logger.info('Downloading %d remote files...', total_files)
    last_progress = 0
    file_count = 0
    with requests.Session() as download_session:
        download_session.stream = False  # To ensure connection to Google can be reused
        for file_path in file_iter:
            if total_files:
                file_count += 1
                current_progress = file_count * 100 // total_files // 5 * 5
                if current_progress != last_progress:
                    last_progress = current_progress
                    logger.info('%d%% downloaded', current_progress)
            else:
                current_progress = file_count // 20 * 20
                if current_progress != last_progress:
                    last_progress = current_progress
                    logger.info('%d files downloaded', current_progress)
            files[file_path] = _download_source_file(download_session,
                                                     deps_tree,
                                                     file_path).split('\n')
    return files
Example no. 19
def main(arg_list=None):
    """CLI entrypoint"""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    config_group = parser.add_mutually_exclusive_group(required=True)
    config_group.add_argument(
        '-b',
        '--base-bundle',
        metavar='NAME',
        dest='bundle',
        action=NewBaseBundleAction,
        help=('The base config bundle name to use (located in resources/config_bundles). '
              'Mutually exclusive with --user-bundle.'))
    config_group.add_argument(
        '-u',
        '--user-bundle',
        metavar='PATH',
        dest='bundle',
        type=lambda x: ConfigBundle(Path(x)),
        help=('The path to a user bundle to use. '
              'Mutually exclusive with --base-bundle.'))
    args = parser.parse_args(args=arg_list)

    logger = get_logger()

    search_regex = re.compile('|'.join(
        map(lambda x: x[0].pattern,
            args.bundle.domain_regex.get_pairs(invert=True))))
    for patch_path in args.bundle.patches.patch_iter():
        if patch_path.exists():
            with patch_path.open(encoding=ENCODING) as file_obj:
                try:
                    patchset = unidiff.PatchSet(file_obj.read())
                except unidiff.errors.UnidiffParseError:
                    logger.exception('Could not parse patch: %s', patch_path)
                    continue
                if _check_substituted_domains(patchset, search_regex):
                    logger.warning('Patch has substituted domains: %s',
                                   patch_path)
        else:
            logger.warning('Patch not found: %s', patch_path)
Example no. 20
def main():
    """CLI entrypoint"""

    logger = get_logger(prepend_timestamp=False, log_init=False)
    warnings = False  # Set to True if any validation warnings are encountered

    patches_dir = get_resources_dir() / PATCHES_DIR
    config_bundles_dir = get_resources_dir() / CONFIG_BUNDLES_DIR

    journal = ExplorationJournal(
        # base bundles not explored yet
        unexplored_set=set(map(
            lambda x: x.name,
            config_bundles_dir.iterdir())),
        # base bundle name -> namedtuple(leaves=set(), gn_flags=set())
        results=dict(),
        # dependency -> set of dependents
        dependents=dict(),
        # patches unused by patch orders
        unused_patches=set(map(
            lambda x: str(x.relative_to(patches_dir)), patches_dir.rglob('*.patch')))
    )
    try:
        # Explore and validate base bundles
        while journal.unexplored_set:
            warnings = _explore_base_bundle(
                next(iter(journal.unexplored_set)), journal, logger) or warnings
        # Check for config file entries that should be merged into dependencies
        warnings = _check_mergability((
            ('GN flags', lambda x: journal.results[x].gn_flags),
            ('patches', lambda x: journal.results[x].patches),
        ), journal.dependents, logger) or warnings
        # Check for patch files not referenced in patch_orders
        if journal.unused_patches:
            logger.warning('Unused patches found: %s', journal.unused_patches)
            warnings = True
    except BuildkitAbort:
        exit(2)

    if warnings:
        exit(1)
Example no. 21
def main(args_list=None):
    """CLI entrypoint"""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-a', '--auto-download', action='store_true',
        help='If specified, it will download the source code and dependencies '
             'for the --base-bundle given. Otherwise, only an existing '
             'buildspace tree will be used.')
    parser.add_argument(
        '-b', '--base-bundle', metavar='NAME', type=get_basebundle_verbosely,
        default='common', help='The base bundle to use. Default: %(default)s')
    parser.add_argument(
        '-p', '--pruning', metavar='PATH', type=Path,
        default='resources/config_bundles/common/pruning.list',
        help='The path to store pruning.list. Default: %(default)s')
    parser.add_argument(
        '-d', '--domain-substitution', metavar='PATH', type=Path,
        default='resources/config_bundles/common/domain_substitution.list',
        help='The path to store domain_substitution.list. Default: %(default)s')
    parser.add_argument(
        '--tree', metavar='PATH', type=Path, default=BUILDSPACE_TREE,
        help=('The path to the buildspace tree to create. '
              'If it is not empty, the source will not be unpacked. '
              'Default: %(default)s'))
    parser.add_argument(
        '--downloads', metavar='PATH', type=Path, default=BUILDSPACE_DOWNLOADS,
        help=('The path to the buildspace downloads directory. '
              'It must already exist. Default: %(default)s'))
    try:
        args = parser.parse_args(args_list)
        if args.tree.exists() and not dir_empty(args.tree):
            get_logger().info('Using existing buildspace tree at %s', args.tree)
        elif args.auto_download:
            source_retrieval.retrieve_and_extract(
                args.base_bundle, args.downloads, args.tree, prune_binaries=False)
        else:
            get_logger().error('No buildspace tree found and --auto-download '
                               'is not specified. Aborting.')
            raise BuildkitAbort()
        get_logger().info('Computing lists...')
        pruning_list, domain_substitution_list = compute_lists(
            args.tree, args.base_bundle.domain_regex.search_regex)
    except BuildkitAbort:
        exit(1)
    with args.pruning.open('w', encoding=ENCODING) as file_obj:
        file_obj.writelines('%s\n' % line for line in pruning_list)
    with args.domain_substitution.open('w', encoding=ENCODING) as file_obj:
        file_obj.writelines('%s\n' % line for line in domain_substitution_list)
Example no. 22
def main(args_list=None):
    """CLI entrypoint"""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-a',
        '--auto-download',
        action='store_true',
        help='If specified, it will download the source code and dependencies '
        'for the --bundle given. Otherwise, only an existing '
        'source tree will be used.')
    parser.add_argument('-b',
                        '--bundle',
                        metavar='PATH',
                        type=Path,
                        default='config_bundles/common',
                        help='The bundle to use. Default: %(default)s')
    parser.add_argument(
        '--pruning',
        metavar='PATH',
        type=Path,
        default='config_bundles/common/pruning.list',
        help='The path to store pruning.list. Default: %(default)s')
    parser.add_argument(
        '--domain-substitution',
        metavar='PATH',
        type=Path,
        default='config_bundles/common/domain_substitution.list',
        help='The path to store domain_substitution.list. Default: %(default)s'
    )
    parser.add_argument(
        '-t',
        '--tree',
        metavar='PATH',
        type=Path,
        required=True,
        help=('The path to the source tree to create. '
              'If it is not empty, the source will not be unpacked.'))
    parser.add_argument('-c',
                        '--cache',
                        metavar='PATH',
                        type=Path,
                        help='The path to the downloads cache.')
    try:
        args = parser.parse_args(args_list)
        try:
            bundle = ConfigBundle(args.bundle)
        except BaseException:
            get_logger().exception('Error loading config bundle')
            raise BuildkitAbort()
        if args.tree.exists() and not dir_empty(args.tree):
            get_logger().info('Using existing source tree at %s', args.tree)
        elif args.auto_download:
            if not args.cache:
                get_logger().error('--cache is required with --auto-download')
                raise BuildkitAbort()
            downloads.retrieve_downloads(bundle, args.cache, True)
            downloads.check_downloads(bundle, args.cache)
            downloads.unpack_downloads(bundle, args.cache, args.tree)
        else:
            get_logger().error('No source tree found and --auto-download '
                               'is not specified. Aborting.')
            raise BuildkitAbort()
        get_logger().info('Computing lists...')
        pruning_list, domain_substitution_list = compute_lists(
            args.tree, bundle.domain_regex.search_regex)
    except BuildkitAbort:
        exit(1)
    with args.pruning.open('w', encoding=ENCODING) as file_obj:
        file_obj.writelines('%s\n' % line for line in pruning_list)
    with args.domain_substitution.open('w', encoding=ENCODING) as file_obj:
        file_obj.writelines('%s\n' % line for line in domain_substitution_list)
Example no. 23
import argparse
import os
import subprocess
import sys
from pathlib import Path

sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
from buildkit.common import get_logger
from buildkit.config import ConfigBundle
sys.path.pop(0)

_CONFIG_BUNDLES_PATH = Path(__file__).parent.parent / 'config_bundles'
_PATCHES_PATH = Path(__file__).parent.parent / 'patches'

_LOGGER = get_logger(prepend_timestamp=False, log_init=False)


def _get_run_quilt(source_dir, series_path, patches_dir):
    """Create a function to run quilt with proper settings"""
    def _run_quilt(*args, log_stderr=True, **kwargs):
        result = subprocess.run(
            ('quilt', '--quiltrc', '-', *args),
            universal_newlines=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=str(source_dir),
            env={
                'QUILT_PATCHES':
                str(patches_dir.resolve()),
                'QUILT_SERIES':
Example no. 24
def _apply_child_bundle_patches(child_path, had_failure, file_layers,
                                patch_cache, bundle_cache):
    """Helper for _test_patches"""
    # Whether the current patch trie branch failed validation
    branch_validation_failed = False

    assert child_path in bundle_cache
    try:
        child_patch_order = bundle_cache[child_path].patch_order
    except KeyError:
        # No patches in the bundle
        pass
    else:
        patches_outdated = bundle_cache[child_path].bundlemeta.patches_outdated
        for patch_path_str in child_patch_order:
            for patched_file in patch_cache[patch_path_str]:
                try:
                    _apply_file_unidiff(patched_file, file_layers.maps[0],
                                        file_layers.parents)
                except _PatchValidationError as exc:
                    # Branch failed validation; abort
                    get_logger().warning('Patch failed validation: %s',
                                         patch_path_str)
                    get_logger().debug(
                        'Specifically, file "%s" failed validation: %s',
                        patched_file.path, exc)
                    branch_validation_failed = True
                    had_failure = had_failure or not patches_outdated
                    break
                except BaseException:
                    # Branch failed validation; abort
                    get_logger().warning('Patch failed validation: %s',
                                         patch_path_str)
                    get_logger().debug(
                        'Specifically, file "%s" caused exception while applying:',
                        patched_file.path,
                        exc_info=True)
                    branch_validation_failed = True
                    had_failure = had_failure or not patches_outdated
                    break
            if branch_validation_failed:
                if patches_outdated:
                    get_logger().warning(
                        '%s is marked with outdated patches. Ignoring failure...',
                        child_path.name)
                break
        if branch_validation_failed != patches_outdated:
            # Metadata for patch validity is out-of-date
            if branch_validation_failed:
                get_logger().error((
                    "%s patches have become outdated. "
                    "Please add 'patches_outdated = true' to its bundlemeta.ini"
                ), child_path.name)
            else:
                get_logger().error((
                    '"%s" is no longer out-of-date! '
                    'Please remove the patches_outdated marking from its bundlemeta.ini'
                ), child_path.name)
            had_failure = True
    return had_failure, branch_validation_failed
Example no. 25
def main():
    """CLI Entrypoint"""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--downloads-cache',
        type=Path,
        metavar='PATH',
        default='../../downloads_cache',
        help='The path to the downloads cache')
    parser.add_argument(
        '--disable-ssl-verification',
        action='store_true',
        help='Disables SSL verification for downloading')
    parser.add_argument(
        '--7z-path',
        dest='sevenz_path',
        default=SEVENZIP_USE_REGISTRY,
        help=('Command or path to 7-Zip\'s "7z" binary. If "_use_registry" is '
              'specified, determine the path from the registry. Default: %(default)s'))
    args = parser.parse_args()

    # Set common variables
    bundle_path = Path(__file__).parent / 'config_bundles/windows'
    bundle = buildkit.config.ConfigBundle(bundle_path)
    source_tree = Path(__file__).resolve().parent.parent
    domsubcache = Path(__file__).parent / 'domsubcache.tar.gz'

    # Test environment
    _test_python2(parser.error)

    # Setup environment
    if not args.downloads_cache.exists():
        args.downloads_cache.mkdir()
    _make_tmp_paths()

    # Retrieve downloads
    get_logger().info('Downloading required files...')
    buildkit.downloads.retrieve_downloads(bundle, args.downloads_cache, True,
                                          args.disable_ssl_verification)
    try:
        buildkit.downloads.check_downloads(bundle, args.downloads_cache)
    except buildkit.downloads.HashMismatchError as exc:
        get_logger().error('File checksum does not match: %s', exc)
        parser.exit(1)

    # Unpack downloads
    extractors = {
        ExtractorEnum.SEVENZIP: args.sevenz_path,
    }
    get_logger().info('Unpacking downloads...')
    buildkit.downloads.unpack_downloads(bundle, args.downloads_cache, source_tree, extractors)

    # Prune binaries
    unremovable_files = buildkit.extraction.prune_dir(source_tree, bundle.pruning)
    if unremovable_files:
        get_logger().error('Files could not be pruned: %s', unremovable_files)
        parser.exit(1)

    # Apply patches
    buildkit.patches.apply_patches(
        buildkit.patches.patch_paths_by_bundle(bundle), source_tree, patch_bin_path=None)

    # Substitute domains
    buildkit.domain_substitution.apply_substitution(bundle, source_tree, domsubcache)

    # Output args.gn
    (source_tree / 'out/Default').mkdir(parents=True)
    (source_tree / 'out/Default/args.gn').write_text(str(bundle.gn_flags), encoding=ENCODING)

    # Run GN bootstrap
    _run_build_process(
        shutil.which('python'), 'tools\\gn\\bootstrap\\bootstrap.py', '-o', 'out\\Default\\gn.exe',
        '--skip-generate-buildfiles')

    # Run gn gen
    _run_build_process('out\\Default\\gn.exe', 'gen', 'out\\Default', '--fail-on-unused-args')

    # Run ninja
    _run_build_process('third_party\\ninja\\ninja.exe', '-C', 'out\\Default', 'chrome',
                       'chromedriver')
Example no. 26
def _test_patches(patch_trie, bundle_cache, patch_cache, orig_files):
    """
    Tests the patches with DFS in the trie of config bundles

    Returns a boolean indicating if any of the patches have failed
    """
    # Stack of iterables over each node's children
    # First, insert iterable over root node's children
    node_iter_stack = [iter(patch_trie.items())]
    # Stack of files at each node differing from the parent
    # The root node thus contains all the files to be patched
    file_layers = collections.ChainMap(orig_files)
    # Whether any branch had failed validation
    had_failure = False
    while node_iter_stack:
        try:
            child_path, grandchildren = next(node_iter_stack[-1])
        except StopIteration:
            # Finished exploring all children of this node
            node_iter_stack.pop()
            del file_layers.maps[0]
            continue
        # Add storage for child's patched files
        file_layers = file_layers.new_child()
        # Apply children's patches
        get_logger(
            prepend_timestamp=False, log_init=False).info('Verifying at depth %s: %s',
                                                          len(node_iter_stack), child_path.name)

        # Potential optimization: Use interval tree data structure instead of copying
        # the entire array to track only diffs

        # Whether the current patch trie branch failed validation
        branch_validation_failed = False
        assert child_path in bundle_cache
        try:
            child_patch_order = bundle_cache[child_path].patch_order
        except KeyError:
            # No patches in the bundle
            pass
        else:
            for patch_path_str in child_patch_order:
                for patched_file in patch_cache[patch_path_str]:
                    try:
                        _apply_file_unidiff(patched_file, file_layers.maps[0], file_layers.parents)
                    except _PatchValidationError as exc:
                        # Branch failed validation; abort
                        get_logger(
                            prepend_timestamp=False, log_init=False).error(
                                "Error processing file '%s' from patch '%s': %s", patched_file.path,
                                patch_path_str, str(exc))
                        branch_validation_failed = True
                        had_failure = True
                        break
                    except BaseException:
                        # Branch failed validation; abort
                        get_logger(
                            prepend_timestamp=False, log_init=False).exception(
                                "Error processing file '%s' from patch '%s'", patched_file.path,
                                patch_path_str)
                        branch_validation_failed = True
                        had_failure = True
                        break
                if branch_validation_failed:
                    break
        if branch_validation_failed:
            # Add blank children to force stack to move onto the next branch
            node_iter_stack.append(iter(tuple()))
        else:
            # Explore this child's children
            node_iter_stack.append(iter(grandchildren.items()))
    return had_failure
    def _sleep_backoff(self):
        """Log info about backoff sleep"""
        get_logger().info('Running HTTP request sleep backoff')
        super()._sleep_backoff()