def generate_patches_from_series(patches_dir, resolve=False):
    """Yield pathlib.Path entries for the patches listed in a GNU Quilt series file.

    patches_dir: pathlib.Path of the quilt-format directory (contains 'series').
    resolve: when True, yield absolute resolved paths joined with patches_dir;
             otherwise yield the raw series entries.
    """
    for series_entry in parse_series(patches_dir / 'series'):
        # Either anchor the entry to the patches directory or hand it back as-is.
        yield (patches_dir / series_entry).resolve() if resolve else series_entry
def generate_patches_from_series(patches_dir, resolve=False):
    """Generates pathlib.Path for patches from a directory in GNU Quilt format

    patches_dir is a pathlib.Path to the quilt-format directory containing a
    'series' file that lists the patch files in application order.
    resolve, when True, yields absolute resolved paths (joined with
    patches_dir); otherwise the raw series entries are yielded unchanged.
    """
    for patch_path in parse_series(patches_dir / 'series'):
        if resolve:
            yield (patches_dir / patch_path).resolve()
        else:
            yield patch_path
def generate_rescue_paths(series_path):
    """Yield the source-tree paths ('--- a/' headers) touched by each patch in the series."""
    marker = '--- a/'
    # Materialize the series entries up front, then scan every patch file.
    patch_entries = list(parse_series(series_path / 'series'))
    for patch_entry in patch_entries:
        with Path(series_path / patch_entry).open(encoding='UTF-8') as handle:
            for raw_line in handle:
                if raw_line.startswith(marker):
                    # The original-file path follows the '--- a/' prefix.
                    yield Path(raw_line.split(marker, 1)[1].strip())
def _read_series_file(patches_dir, series_file, join_dir=False):
    """Yield the entries listed in a series file.

    patches_dir: pathlib.Path of the patches directory.
    series_file: pathlib.Path of the series file, relative to patches_dir.
    join_dir: when True, prefix each yielded entry with patches_dir.
    """
    series_entries = parse_series(patches_dir / series_file)
    if join_dir:
        yield from (patches_dir / series_entry for series_entry in series_entries)
    else:
        yield from series_entries
def _read_series_file(patches_dir, series_file, join_dir=False):
    """
    Returns a generator over the entries in the series file

    patches_dir is a pathlib.Path to the directory of patches
    series_file is a pathlib.Path relative to patches_dir
    join_dir indicates if the patches_dir should be joined with the series entries
    """
    for entry in parse_series(patches_dir / series_file):
        if join_dir:
            yield patches_dir / entry
        else:
            yield entry
def generate_rescue_paths(series_path):
    """Yield the destination paths ('+++ b/...') of files newly created by the series' patches.

    A newly created file is recognized by a '--- /dev/null' source header; its
    path is taken from the '+++ b/' line that immediately follows it.

    series_path: pathlib.Path of the directory containing the 'series' file.
    """
    # Get patch paths from series
    patch_paths = list(parse_series(series_path / 'series'))
    for patch in patch_paths:
        # Read lines of .patch file
        with Path(series_path / patch).open(encoding='UTF-8') as patch_file:
            found = False
            for line in patch_file:
                if found:
                    found = False
                    # Guard against malformed patches where '--- /dev/null' is
                    # not followed by a '+++ b/' destination header; previously
                    # this split raised IndexError on such input.
                    if line.startswith('+++ b/'):
                        first_line = line.split('+++ b/', 1)[1].strip()
                        msg(str(patch))
                        yield Path(first_line)
                    continue
                if line.startswith('--- /dev/null'):
                    found = True
def main():
    """CLI Entrypoint"""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-s', '--series', type=Path, metavar='FILE',
                        default=str(Path('patches', 'series')),
                        help='The series file listing patches to apply. Default: %(default)s')
    parser.add_argument('-p', '--patches', type=Path, metavar='DIRECTORY',
                        default='patches',
                        help='The patches directory to read from. Default: %(default)s')
    add_common_params(parser)
    # Exactly one source of files-under-test must be chosen.
    source_group = parser.add_mutually_exclusive_group(required=True)
    source_group.add_argument(
        '-l', '--local', type=Path, metavar='DIRECTORY',
        help='Use a local source tree. It must be UNMODIFIED, otherwise the results will not be valid.')
    source_group.add_argument(
        '-r', '--remote', action='store_true',
        help=('Download the required source tree files from Google. '
              'This feature requires the Python module "requests". If you do not want to '
              'install this, consider using --local instead.'))
    source_group.add_argument(
        '-c', '--cache-remote', type=Path, metavar='DIRECTORY',
        help='(For debugging) Store the required remote files in an empty local directory')
    args = parser.parse_args()

    # Create the cache directory on demand, but only when its parent exists.
    if args.cache_remote and not args.cache_remote.exists():
        if args.cache_remote.parent.exists():
            args.cache_remote.mkdir()
        else:
            parser.error(f'Parent of cache path {args.cache_remote} does not exist')
    if not args.series.is_file():
        parser.error(f'--series path is not a file or not found: {args.series}')
    if not args.patches.is_dir():
        parser.error(f'--patches path is not a directory or not found: {args.patches}')

    patch_series = tuple(parse_series(args.series))
    failure_detected, patch_cache = _load_all_patches(patch_series, args.patches)
    required_files = _get_required_files(patch_cache)
    files_under_test = _get_files_under_test(args, required_files, parser)
    failure_detected |= _test_patches(patch_series, patch_cache, files_under_test)
    if failure_detected:
        get_logger().error('***FAILED VALIDATION; SEE ABOVE***')
        if not args.verbose:
            get_logger().info('(For more error details, re-run with the "-v" flag)')
        parser.exit(status=1)
    else:
        get_logger().info('Passed validation (%d patches total)', len(patch_series))
def compare_new_patches(old_dir_dst, new_dir_dst, tree_path, patch_bin_path=None):
    """Reconcile a source tree against a changed patch series.

    Compares the old and new series directories; for each file whose hunks
    changed, was added, or was removed, it snapshots the file (temp_file
    'create'), reverses the old patches touching it, and applies the new ones.

    old_dir_dst / new_dir_dst: pathlib.Path of the old/new patch directories.
    tree_path: pathlib.Path of the source tree being updated.
    patch_bin_path: optional path to the `patch` binary, passed through to
        patch_from_line.
    """
    if (old_dir_dst / 'series').exists():
        old_patches_list = list(parse_series(old_dir_dst / 'series'))
    else:
        old_patches_list = []  # was list(''): same empty list, clearer
        print('\n *** NO OLD PATCHES FOUND ***')
    new_patches_list = list(parse_series(new_dir_dst / 'series'))
    old_patches_list_rev = list(reversed(old_patches_list))
    patched_files = []
    # Initialize so the '--- /dev' lookbehind below cannot raise NameError
    # when the very first processed patch is a brand-new one.
    prev_line = ''
    # Old patches in order, then any new-only patches appended.
    for patch in (old_patches_list + list(set(new_patches_list) - set(old_patches_list))):
        old_patch_dst = old_dir_dst / Path(patch)
        new_patch_dst = new_dir_dst / Path(patch)
        if patch in old_patches_list and patch in new_patches_list:
            # Patch exists on both sides: act only if its content changed.
            if not filecmp.cmp(new_patch_dst, old_patch_dst):
                print('\n[STATUS: Patch ' + Path(patch).name + ' changed, applying updates]')
                # Context managers fix the original's leaked file handles.
                with open(new_patch_dst) as new_fh:
                    new_patch = new_fh.readlines()
                with open(old_patch_dst) as old_fh:
                    old_patch = old_fh.readlines()
                hunk_found = False
                new_hunk = ''
                for line in new_patch:
                    if line.startswith('--- '):
                        # A new file header: flush the hunk collected so far.
                        if hunk_found:
                            old_hunk = find_file_hunks(patched_file, old_patch)
                            if (not old_hunk) or (old_hunk != new_hunk):
                                patched_files.append(patched_file)
                                temp_file(tree_path, patched_file, 'create')
                                patch_from_line(old_patches_list_rev, old_dir_dst,
                                                patched_file, tree_path,
                                                patch_bin_path, reverse=True)
                                patch_from_line(new_patches_list, new_dir_dst,
                                                patched_file, tree_path, patch_bin_path)
                            hunk_found = False
                    elif line.startswith('+++ b'):
                        if line in patched_files:
                            continue
                        hunk_found = True
                        new_hunk = prev_line  # start the hunk at its '--- ' line
                        patched_file = line
                    if hunk_found:
                        new_hunk += line
                    prev_line = line
                # Flush the trailing hunk after the loop ends.
                if hunk_found:
                    old_hunk = find_file_hunks(patched_file, old_patch)
                    if (not old_hunk) or (old_hunk != new_hunk):
                        patched_files.append(patched_file)
                        temp_file(tree_path, patched_file, 'create')
                        patch_from_line(old_patches_list_rev, old_dir_dst, patched_file,
                                        tree_path, patch_bin_path, reverse=True)
                        patch_from_line(new_patches_list, new_dir_dst, patched_file,
                                        tree_path, patch_bin_path)
                # Files the old patch touched but the new one no longer does.
                for line in old_patch:
                    if line.startswith('+++ b') and line not in new_patch:
                        if line not in patched_files:
                            patched_files.append(line)
                            temp_file(tree_path, line, 'create')
                            patch_from_line(old_patches_list_rev, old_dir_dst, line,
                                            tree_path, patch_bin_path, reverse=True)
                            patch_from_line(new_patches_list, new_dir_dst, line,
                                            tree_path, patch_bin_path)
        elif patch in old_patches_list and patch not in new_patches_list:
            # Patch was removed from the series: reverse everything it touched.
            print('\n[STATUS: Reversing removed patch:' + old_patch_dst.name + ']')
            with open(old_patch_dst) as old_fh:
                old_patch = old_fh.readlines()
            for line in old_patch:
                if line.startswith('+++ b') and line not in patched_files:
                    patched_file = line
                    patched_files.append(patched_file)
                    temp_file(tree_path, patched_file, 'create')
                    patch_from_line(old_patches_list_rev, old_dir_dst, patched_file,
                                    tree_path, patch_bin_path, reverse=True)
                    patch_from_line(new_patches_list, new_dir_dst, patched_file,
                                    tree_path, patch_bin_path)
                prev_line = line
        elif patch not in old_patches_list and patch in new_patches_list:
            # Patch is new to the series: apply it to every file it touches.
            print('\n[STATUS: Applying new patch:' + new_patch_dst.name + ']')
            with open(new_patch_dst) as new_fh:
                new_patch = new_fh.readlines()
            for line in new_patch:
                if line.startswith('+++ b') and line not in patched_files:
                    patched_file = line
                    patched_files.append(patched_file)
                    # A '--- /dev/null' predecessor means a newly created file:
                    # there is nothing to snapshot or reverse, only apply.
                    if not prev_line.startswith('--- /dev'):
                        temp_file(tree_path, patched_file, 'create')
                        patch_from_line(old_patches_list_rev, old_dir_dst, patched_file,
                                        tree_path, patch_bin_path, reverse=True)
                    patch_from_line(new_patches_list, new_dir_dst, patched_file,
                                    tree_path, patch_bin_path)
                prev_line = line
    # Promote the temp snapshots into place, then clean them up.
    for patched_file in patched_files:
        temp_file(tree_path, patched_file, 'replace')
        temp_file(tree_path, patched_file, 'remove')
    if patched_files:
        print('-------Patches updated!-------')
    else:
        print('-------No new patches!-------')
def main():
    """CLI Entrypoint"""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-s', '--series', type=Path, metavar='FILE',
                        default=str(_ROOT_DIR / 'patches' / 'series'),
                        help='The series file listing patches to apply. Default: %(default)s')
    parser.add_argument('-p', '--patches', type=Path, metavar='DIRECTORY',
                        default=str(_ROOT_DIR / 'patches'),
                        help='The patches directory to read from. Default: %(default)s')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Log more information to stdout/stderr')
    # Exactly one source of files-under-test must be chosen.
    source_group = parser.add_mutually_exclusive_group(required=True)
    source_group.add_argument(
        '-l', '--local', type=Path, metavar='DIRECTORY',
        help='Use a local source tree. It must be UNMODIFIED, otherwise the results will not be valid.')
    source_group.add_argument(
        '-r', '--remote', action='store_true',
        help=('Download the required source tree files from Google. '
              'This feature requires the Python module "requests". If you do not want to '
              'install this, consider using --local instead.'))
    source_group.add_argument(
        '-c', '--cache-remote', type=Path, metavar='DIRECTORY',
        help='(For debugging) Store the required remote files in an empty local directory')
    args = parser.parse_args()

    # Create the cache directory on demand, but only when its parent exists.
    if args.cache_remote and not args.cache_remote.exists():
        if args.cache_remote.parent.exists():
            args.cache_remote.mkdir()
        else:
            parser.error(f'Parent of cache path {args.cache_remote} does not exist')
    if not args.series.is_file():
        parser.error(f'--series path is not a file or not found: {args.series}')
    if not args.patches.is_dir():
        parser.error(f'--patches path is not a directory or not found: {args.patches}')

    # Initialize logging at the requested verbosity.
    get_logger(initial_level=logging.DEBUG if args.verbose else logging.INFO)

    patch_series = tuple(parse_series(args.series))
    failure_detected, patch_cache = _load_all_patches(patch_series, args.patches)
    required_files = _get_required_files(patch_cache)
    files_under_test = _get_files_under_test(args, required_files, parser)
    failure_detected |= _test_patches(patch_series, patch_cache, files_under_test)
    if failure_detected:
        get_logger().error('***FAILED VALIDATION; SEE ABOVE***')
        if not args.verbose:
            get_logger().info('(For more error details, re-run with the "-v" flag)')
        parser.exit(status=1)
    else:
        get_logger().info('Passed validation')