Пример #1
0
def main(arg_list=None):
    """CLI entrypoint.

    Parses arguments (from arg_list, or sys.argv when None) and inverts the
    domain substitution on the selected bundle's patches. Exits with status 1
    if any regex pair cannot be inverted.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    # The two bundle sources share dest='bundle', so exactly one may be given
    config_group = parser.add_mutually_exclusive_group()
    config_group.add_argument(
        '-b',
        '--base-bundle',
        metavar='NAME',
        dest='bundle',
        action=NewBaseBundleAction,
        # NOTE: help text previously referenced "--user-bundle-path", which is
        # not a defined option; the actual sibling option is --user-bundle.
        help=('The base config bundle name to use (located in resources/config_bundles). '
              'Mutually exclusive with --user-bundle. '))
    config_group.add_argument(
        '-u',
        '--user-bundle',
        metavar='PATH',
        dest='bundle',
        type=lambda x: ConfigBundle(Path(x)),
        # NOTE: help text previously referenced "--base-bundle-name", which is
        # not a defined option; the actual sibling option is --base-bundle.
        help=('The path to a user bundle to use. '
              'Mutually exclusive with --base-bundle. '))
    args = parser.parse_args(args=arg_list)

    try:
        domain_substitution.process_bundle_patches(args.bundle, invert=True)
    except ValueError:
        # A regex pair could not be inverted; log the traceback and fail
        get_logger().exception('A regex pair is not invertible')
        parser.exit(status=1)
Пример #2
0
def main():
    """CLI Entrypoint.

    Refreshes quilt patches for one or more config bundles against a source
    tree. Validates all paths up front, pops any currently applied patches,
    then refreshes each bundle's series; exits with status 1 on failure.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-b',
        '--bundle',
        action='append',
        type=Path,
        metavar='DIRECTORY',
        help=('Update patches for a config bundle. Specify multiple times to '
              'update multiple bundles. Without specifying, all bundles will be updated.'))
    parser.add_argument(
        '-s',
        '--source-dir',
        type=Path,
        required=True,
        metavar='DIRECTORY',
        help='Path to the source tree')
    parser.add_argument(
        '-a',
        '--abort-on-failure',
        action='store_true',
        help=('If specified, abort on the first patch that fails to refresh. '
              'This allows for one to refresh the rest of the patches in the series.'))
    args = parser.parse_args()

    # Validate all user-supplied paths before doing any work
    if not args.source_dir.exists():
        parser.error('Cannot find source tree at: {}'.format(args.source_dir))
    if args.bundle:
        for bundle_path in args.bundle:
            if not bundle_path.exists():
                parser.error('Could not find config bundle at: {}'.format(bundle_path))

    # quilt reads these paths from the environment; fall back to quilt's defaults
    patches_dir = Path(os.environ.get('QUILT_PATCHES', 'patches'))
    if not patches_dir.exists():
        parser.error('Cannot find patches directory at: {}'.format(patches_dir))

    series_path = Path(os.environ.get('QUILT_SERIES', 'series'))
    if not series_path.exists() and not (patches_dir / series_path).exists(): #pylint: disable=no-member
        parser.error('Cannot find series file at "{}" or "{}"'.format(series_path,
                                                                      patches_dir / series_path))

    # Path to bundle -> ConfigBundle without dependencies
    bundle_cache = dict(
        map(lambda x: (x, ConfigBundle(x, load_depends=False)), _CONFIG_BUNDLES_PATH.iterdir()))
    patch_trie = _get_patch_trie(bundle_cache, args.bundle)
    run_quilt = _get_run_quilt(args.source_dir, series_path, patches_dir)
    # Remove currently applied patches first; 'quilt top' returns 2 when none
    # are applied, so only pop for any other return code
    if series_path.exists():
        if run_quilt('top').returncode != 2:
            _LOGGER.info('Popping applied patches')
            run_quilt('pop', '-a', check=True)
    had_failure = _refresh_patches(patch_trie, bundle_cache, series_path, run_quilt,
                                   args.abort_on_failure)
    if had_failure:
        # Fixed typo in the original message: "occured" -> "occurred"
        _LOGGER.error('Error(s) occurred while refreshing. See output above.')
        parser.exit(status=1)
    _LOGGER.info('Successfully refreshed patches.')
Пример #3
0
def main():
    """CLI Entrypoint.

    Verifies that config bundle patches apply cleanly against source files
    obtained from a local tree, a remote download, or a local remote-file
    cache. Exits with status 1 if any patch fails to apply.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-b',
        '--bundle',
        action='append',
        type=Path,
        metavar='DIRECTORY',
        help=('Verify patches for a config bundle. Specify multiple times to '
              'verify multiple bundles. Without specifying, all bundles will be verified.'))
    parser.add_argument(
        '-v', '--verbose', action='store_true', help='Log more information to stdout/stderr')
    file_source_group = parser.add_mutually_exclusive_group(required=True)
    file_source_group.add_argument(
        '-l', '--local', type=Path, metavar='DIRECTORY', help='Use a local source tree')
    file_source_group.add_argument(
        '-r',
        '--remote',
        action='store_true',
        help='Download the required source tree files from Google')
    file_source_group.add_argument(
        '-c',
        '--cache-remote',
        type=Path,
        metavar='DIRECTORY',
        help='(For debugging) Store the required remote files in an empty local directory')
    args = parser.parse_args()

    # Create the remote-file cache directory on demand; its parent must exist
    if args.cache_remote and not args.cache_remote.exists():
        if not args.cache_remote.parent.exists():
            parser.error('Parent of cache path {} does not exist'.format(args.cache_remote))
        args.cache_remote.mkdir()

    # Initialize logging at the requested verbosity
    log_level = logging.DEBUG if args.verbose else logging.INFO
    get_logger(initial_level=log_level, prepend_timestamp=False, log_init=False)

    # Every explicitly requested bundle directory must exist
    for bundle_path in args.bundle or ():
        if not bundle_path.exists():
            parser.error('Could not find config bundle at: {}'.format(bundle_path))

    # Path to bundle -> ConfigBundle without dependencies
    bundle_cache = {
        bundle_dir: ConfigBundle(bundle_dir, load_depends=False)
        for bundle_dir in _CONFIG_BUNDLES_PATH.iterdir()
    }
    patch_trie = _get_patch_trie(bundle_cache, args.bundle)
    patch_cache = _load_all_patches(bundle_cache.values())
    required_files = _get_required_files(patch_cache)
    orig_files = _get_orig_files(args, required_files, parser)
    had_failure = _test_patches(patch_trie, bundle_cache, patch_cache, orig_files)
    if had_failure:
        if not args.verbose:
            get_logger().info('(For more error details, re-run with the "-v" flag)')
        parser.exit(status=1)
Пример #4
0
def main():
    """CLI Entrypoint.

    Verifies that config bundle patches apply cleanly against source files
    from a local tree or a remote download; can also populate a local cache
    of the remote files for debugging. Exits with status 1 on patch failure.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-b',
        '--bundle',
        action='append',
        type=Path,
        metavar='DIRECTORY',
        help=('Verify patches for a config bundle. Specify multiple times to '
              'verify multiple bundles. Without specifying, all bundles will be verified.'))
    file_source_group = parser.add_mutually_exclusive_group(required=True)
    file_source_group.add_argument(
        '-l', '--local', type=Path, metavar='DIRECTORY', help='Use a local source tree')
    file_source_group.add_argument(
        '-r',
        '--remote',
        action='store_true',
        help='Download the required source tree files from Google')
    file_source_group.add_argument(
        '-c',
        '--cache-remote',
        type=Path,
        metavar='DIRECTORY',
        help='(For debugging) Store the required remote files in an empty local directory')
    args = parser.parse_args()

    # Create the remote-file cache directory on demand; its parent must exist
    if args.cache_remote and not args.cache_remote.exists():
        if not args.cache_remote.parent.exists():
            parser.error('Parent of cache path {} does not exist'.format(args.cache_remote))
        args.cache_remote.mkdir()

    # Path to bundle -> ConfigBundle without dependencies
    bundle_cache = {
        bundle_dir: ConfigBundle(bundle_dir, load_depends=False)
        for bundle_dir in _CONFIG_BUNDLES_PATH.iterdir()
    }
    patch_trie = _get_patch_trie(bundle_cache, args.bundle)
    patch_cache = _load_all_patches(bundle_cache.values())
    required_files = _get_required_files(patch_cache)
    if args.local:
        orig_files = _retrieve_local_files(required_files, args.local)
    else: # --remote and --cache-remote
        orig_files = _retrieve_remote_files(required_files)
        if args.cache_remote:
            # Mirror each downloaded file into the cache directory, then exit
            for relative_path, content_lines in orig_files.items():
                cache_path = args.cache_remote / relative_path
                cache_path.parent.mkdir(parents=True, exist_ok=True)
                with cache_path.open('w', encoding=ENCODING) as cache_file:
                    cache_file.write('\n'.join(content_lines))
            parser.exit()
    had_failure = _test_patches(patch_trie, bundle_cache, patch_cache, orig_files)
    if had_failure:
        parser.exit(status=1)
Пример #5
0
def main(arg_list=None):
    """CLI entrypoint.

    Parses arguments (from arg_list, or sys.argv when None) and scans the
    selected bundle's patches, warning about patches that contain substituted
    domains or that are missing from disk.
    """
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    # The two bundle sources share dest='bundle', so exactly one may be given
    config_group = parser.add_mutually_exclusive_group()
    config_group.add_argument(
        '-b',
        '--base-bundle',
        metavar='NAME',
        dest='bundle',
        action=NewBaseBundleAction,
        # NOTE: help text previously referenced "--user-bundle-path", which is
        # not a defined option; the actual sibling option is --user-bundle.
        help=('The base config bundle name to use (located in resources/config_bundles). '
              'Mutually exclusive with --user-bundle. '))
    config_group.add_argument(
        '-u',
        '--user-bundle',
        metavar='PATH',
        dest='bundle',
        type=lambda x: ConfigBundle(Path(x)),
        # NOTE: help text previously referenced "--base-bundle-name", which is
        # not a defined option; the actual sibling option is --base-bundle.
        help=('The path to a user bundle to use. '
              'Mutually exclusive with --base-bundle. '))
    args = parser.parse_args(args=arg_list)

    logger = get_logger()

    # One combined regex that matches any inverted (substituted) domain pattern
    search_regex = re.compile('|'.join(
        map(lambda x: x[0].pattern,
            args.bundle.domain_regex.get_pairs(invert=True))))
    for patch_path in args.bundle.patches.patch_iter():
        if patch_path.exists():
            with patch_path.open(encoding=ENCODING) as file_obj:
                try:
                    patchset = unidiff.PatchSet(file_obj.read())
                except unidiff.errors.UnidiffParseError:
                    # Unparseable patch: report it and keep scanning the rest
                    logger.exception('Could not parse patch: %s', patch_path)
                    continue
                if _check_substituted_domains(patchset, search_regex):
                    logger.warning('Patch has substituted domains: %s',
                                   patch_path)
        else:
            logger.warning('Patch not found: %s', patch_path)
Пример #6
0
def main(args_list=None):
    """CLI entrypoint.

    Prepares a source tree (reusing an existing one or downloading it when
    --auto-download is given), computes the pruning and domain substitution
    lists for the bundle, and writes them to the configured paths. Exits with
    status 1 on any BuildkitAbort.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-a',
        '--auto-download',
        action='store_true',
        help='If specified, it will download the source code and dependencies '
        'for the --bundle given. Otherwise, only an existing '
        'source tree will be used.')
    parser.add_argument('-b',
                        '--bundle',
                        metavar='PATH',
                        type=Path,
                        default='config_bundles/common',
                        help='The bundle to use. Default: %(default)s')
    parser.add_argument(
        '--pruning',
        metavar='PATH',
        type=Path,
        default='config_bundles/common/pruning.list',
        help='The path to store pruning.list. Default: %(default)s')
    parser.add_argument(
        '--domain-substitution',
        metavar='PATH',
        type=Path,
        default='config_bundles/common/domain_substitution.list',
        help='The path to store domain_substitution.list. Default: %(default)s'
    )
    parser.add_argument(
        '-t',
        '--tree',
        metavar='PATH',
        type=Path,
        required=True,
        help=('The path to the source tree to create. '
              'If it is not empty, the source will not be unpacked.'))
    parser.add_argument('-c',
                        '--cache',
                        metavar='PATH',
                        type=Path,
                        help='The path to the downloads cache.')
    try:
        args = parser.parse_args(args_list)
        try:
            bundle = ConfigBundle(args.bundle)
        # NOTE(review): BaseException also catches KeyboardInterrupt/SystemExit
        # here, converting them into a logged abort — confirm this is intended
        except BaseException:
            get_logger().exception('Error loading config bundle')
            raise BuildkitAbort()
        if args.tree.exists() and not dir_empty(args.tree):
            get_logger().info('Using existing source tree at %s', args.tree)
        elif args.auto_download:
            if not args.cache:
                get_logger().error('--cache is required with --auto-download')
                raise BuildkitAbort()
            downloads.retrieve_downloads(bundle, args.cache, True)
            downloads.check_downloads(bundle, args.cache)
            downloads.unpack_downloads(bundle, args.cache, args.tree)
        else:
            get_logger().error('No source tree found and --auto-download '
                               'is not specified. Aborting.')
            raise BuildkitAbort()
        get_logger().info('Computing lists...')
        pruning_list, domain_substitution_list = compute_lists(
            args.tree, bundle.domain_regex.search_regex)
    except BuildkitAbort:
        # Use parser.exit (like the other entrypoints) instead of the
        # site-module builtin exit(), which is absent under `python -S`
        parser.exit(status=1)
    # Persist the computed lists, one entry per line
    with args.pruning.open('w', encoding=ENCODING) as file_obj:
        file_obj.writelines('%s\n' % line for line in pruning_list)
    with args.domain_substitution.open('w', encoding=ENCODING) as file_obj:
        file_obj.writelines('%s\n' % line for line in domain_substitution_list)
def _explore_base_bundle(current_name, journal, logger):
    """
    Explore the base bundle given by current_name. Modifies journal
    Returns True if warnings occurred, False otherwise.
    Raises BuildkitAbort if fatal errors occurred.
    """
    # Depth-first traversal over the bundle dependency graph; results for each
    # node are memoized in journal.results, and journal.unexplored_set doubles
    # as the "currently being explored" marker for cycle detection.
    warnings = False

    if current_name in journal.results:
        # Node has been explored iff its results are stored
        return warnings

    # Indicate start of node exploration
    try:
        journal.unexplored_set.remove(current_name)
    except KeyError:
        # Exploration has begun but there are no results, so it still must be processing
        # its dependencies
        logger.error('Dependencies of "%s" are cyclical', current_name)
        raise BuildkitAbort()

    # load_depends=False: dependencies are resolved by this traversal itself
    current_base_bundle = ConfigBundle.from_base_name(current_name,
                                                      load_depends=False)
    current_meta = BaseBundleMetaIni(current_base_bundle.path /
                                     BASEBUNDLEMETA_INI)

    # Populate current base bundle's data
    current_results = BaseBundleResult(leaves=set(),
                                       gn_flags=set(),
                                       patches=set())
    # Each helper returns True on warnings; OR-accumulate so a later False
    # never clears an earlier True
    warnings = _populate_set_with_gn_flags(
        current_results.gn_flags, current_base_bundle, logger) or warnings
    warnings = _populate_set_with_patches(
        current_results.patches, journal.unused_patches, current_base_bundle,
        logger) or warnings
    warnings = _check_patches(current_base_bundle, logger) or warnings

    # Set an empty set just in case this node has no dependents
    if current_name not in journal.dependents:
        journal.dependents[current_name] = set()

    for dependency_name in current_meta.depends:
        # Update dependents (reverse edges: dependency -> its dependents)
        if dependency_name not in journal.dependents:
            journal.dependents[dependency_name] = set()
        journal.dependents[dependency_name].add(current_name)

        # Explore dependencies (recursion fills journal.results[dependency_name])
        warnings = _explore_base_bundle(dependency_name, journal,
                                        logger) or warnings

        # Merge sets of dependencies with the current
        warnings = _merge_disjoints((
            ('Patches', current_results.patches,
             journal.results[dependency_name].patches, False),
            ('GN flags', current_results.gn_flags,
             journal.results[dependency_name].gn_flags, False),
            ('Dependencies', current_results.leaves,
             journal.results[dependency_name].leaves, True),
        ), current_name, logger) or warnings
    if not current_results.leaves:
        # This node is a leaf node
        current_results.leaves.add(current_name)

    # Store results last to indicate it has been successfully explored
    journal.results[current_name] = current_results

    return warnings