def __main__():
    """Command-line entry point: filter an input stream per JSON filter specs.

    Reads filter specifications from --jsonfile (a JSON array of objects,
    each with a 'filter' key plus filter-specific parameters), applies them
    to --input (or stdin) via filter_file, and writes results to --output
    (or stdout). Exits with a numbered error message on any failure.
    """
    # Parse command line.
    parser = optparse.OptionParser()
    parser.add_option('-i', '--input', dest='input', default=None,
                      help='Input file for filtering')
    parser.add_option('-j', '--jsonfile', dest='jsonfile', default=None,
                      help='JSON array of filter specifications')
    parser.add_option('-o', '--output', dest='output', default=None,
                      help='Output file for query results')
    parser.add_option('-v', '--verbose', dest='verbose', default=False,
                      action='store_true', help='verbose')
    (options, args) = parser.parse_args()

    # Input source: named file if given, otherwise stdin.
    if options.input is not None:
        try:
            inputPath = os.path.abspath(options.input)
            inputFile = open(inputPath, 'r')
        except Exception as e:
            # sys.exit instead of the site-provided exit() builtin, which is
            # not guaranteed to exist (python -S, frozen builds).
            sys.exit('Error1: %s' % (e))
    else:
        inputFile = sys.stdin

    # Output sink: named file if given, otherwise stdout.
    if options.output is not None:
        try:
            outputPath = os.path.abspath(options.output)
            outputFile = open(outputPath, 'w')
        except Exception as e:
            sys.exit('Error2: %s' % (e))
    else:
        outputFile = sys.stdout

    # Load filter specifications, if any.
    filters = None
    if options.jsonfile:
        try:
            with open(options.jsonfile) as fh:
                filters = json.load(fh)
        except Exception as e:
            sys.exit('Error3: %s' % (e))

    # In verbose mode, echo each filter spec: its name plus remaining keys.
    if options.verbose and filters:
        for f in filters:
            print('%s %s' % (f['filter'],
                             ', '.join(['%s: %s' % (k, f[k])
                                        for k in set(f.keys()) - set(['filter'])])),
                  file=sys.stdout)

    try:
        filter_file(inputFile, outputFile, filters=filters)
    except Exception as e:
        sys.exit('Error: %s' % (e))
def __main__():
    """CLI driver: apply JSON-specified filters to an input stream.

    Options select the input file (default stdin), the output file (default
    stdout), a JSON file holding an array of filter specifications, and a
    verbose flag that echoes each loaded filter before running.
    """
    # Build and run the option parser.
    parser = optparse.OptionParser()
    parser.add_option('-i', '--input', dest='input', default=None,
                      help='Input file for filtering')
    parser.add_option('-j', '--jsonfile', dest='jsonfile', default=None,
                      help='JSON array of filter specifications')
    parser.add_option('-o', '--output', dest='output', default=None,
                      help='Output file for query results')
    parser.add_option('-v', '--verbose', dest='verbose', default=False,
                      action='store_true', help='verbose')
    options, args = parser.parse_args()

    # Default to stdin; replace with the named file when -i is supplied.
    inputFile = sys.stdin
    if options.input is not None:
        try:
            inputFile = open(os.path.abspath(options.input), 'r')
        except Exception as e:
            exit('Error: %s' % (e))

    # Default to stdout; replace with the named file when -o is supplied.
    outputFile = sys.stdout
    if options.output is not None:
        try:
            outputFile = open(os.path.abspath(options.output), 'w')
        except Exception as e:
            exit('Error: %s' % (e))

    # Load the filter spec array, if one was given.
    filters = None
    if options.jsonfile:
        try:
            with open(options.jsonfile) as fh:
                filters = json.load(fh)
        except Exception as e:
            exit('Error: %s' % (e))

    # Verbose: print each filter's name followed by its other key/value pairs.
    if options.verbose and filters:
        for f in filters:
            extras = ', '.join('%s: %s' % (k, f[k])
                               for k in set(f.keys()) - set(['filter']))
            print('%s %s' % (f['filter'], extras), file=sys.stdout)

    try:
        filter_file(inputFile, outputFile, filters=filters)
    except Exception as e:
        exit('Error: %s' % (e))
def verify_commit(original_commit, new_tree):
    """Verifies if new_tree is exactly original_commit after filters.

    Args:
        original_commit: commit hash in Chromium browser tree.
        new_tree: tree hash created for upstream branch commit.
    """
    # Re-derive the expected file set by filtering the original commit's
    # file list, rebuild a tree from it, and require an exact hash match.
    original_files = utils.get_file_list(original_commit)
    expected_file_list = filters.filter_file([], original_files)
    rebuilt_tree = utils.git_mktree(expected_file_list)
    assert rebuilt_tree == new_tree
def main():
    """Squash changes between two commits into a single patch and apply it.

    Builds filtered file lists for the old and new commits, synthesizes a
    root commit from the old tree and a squashed commit from the new tree,
    formats the diff as a mail patch, and either prints it (--dry_run) or
    applies it with `git am -3`.
    """
    # Init args.
    parser = argparse.ArgumentParser(
        description='Copy file from given commits')
    parser.add_argument(
        'old_commit', metavar='old_commit', type=str, nargs=1,
        help='commit hash in upstream branch or browser repository '
             'we want to uprev from')
    parser.add_argument(
        'new_commit', metavar='new_commit', type=str, nargs=1,
        # Typo fix: "we want ot uprev to" -> "we want to uprev to".
        help='commit hash in upstream branch or browser repository '
             'we want to uprev to')
    parser.add_argument(
        '--dry_run', dest='dry_run',
        action='store_const', const=True, default=False)
    parser.add_argument(
        '--is_browser', dest='is_browser',
        action='store_const', const=True, default=False,
        help='is the commit hash in browser repository')
    arg = parser.parse_args(sys.argv[1:])

    # Get old and new files.
    old_files = utils.get_file_list(arg.old_commit[0])
    new_files = utils.get_file_list(arg.new_commit[0])

    if arg.is_browser:
        # Browser-repo hashes carry the full tree; reduce to libchrome files.
        old_files = filters.filter_file([], old_files)
        new_files = filters.filter_file([], new_files)

    # Sanity check: every remaining file must survive filtering.
    assert filters.filter_file(old_files, []) == []
    assert filters.filter_file(new_files, []) == []

    # Generate tree objects, then a synthetic root commit (old tree) and a
    # single squashed commit (new tree) on top of it.
    old_tree = utils.git_mktree(old_files)
    new_tree = utils.git_mktree(new_files)
    newroot = utils.git_commit(old_tree, [])
    squashed = utils.git_commit(new_tree, [newroot])

    # Generate patch for git am. Hashes are bytes, hence the b'..' join.
    patch = subprocess.check_output(
        ['git', 'format-patch', '--stdout', newroot + b'..' + squashed])

    if arg.dry_run:
        print(patch.decode('utf-8'))
    else:
        subprocess.run(['git', 'am', '-3'], input=patch)
def main():
    """Copy files from a given commit into the working tree and git index.

    Compares the file list of the given commit against HEAD (after
    filtering), computes the needed add/delete operations, and either
    prints them (--dry_run) or performs them with git plumbing commands.
    """
    # Init args.
    parser = argparse.ArgumentParser(
        description='Copy file from given commits')
    parser.add_argument('commit_hash', metavar='commit', type=str, nargs=1,
                        help='commit hash to copy files from')
    parser.add_argument('--dry_run', dest='dry_run',
                        action='store_const', const=True, default=False)
    arg = parser.parse_args(sys.argv[1:])

    # Read file list from HEAD and upstream commit.
    upstream_files = utils.get_file_list(arg.commit_hash[0])
    our_files = utils.get_file_list('HEAD')

    # Calculate target file list.
    target_files = filters.filter_file(our_files, upstream_files)

    # Calculate operations needed.
    ops = utils.gen_op(our_files, target_files)

    if arg.dry_run:
        # Print ops only on dry-run mode.
        print('\n'.join(repr(x) for x in ops))
        return

    for op, f in ops:
        # REP operations are deliberately ignored: we only copy missing
        # files, never revert custom Chromium OS libchrome patches.
        # isinstance instead of `type(op) ==` for the idiomatic type check.
        assert isinstance(op, utils.DiffOperations)
        if op == utils.DiffOperations.DEL:
            # Bug fix: the original line ended with a stray trailing comma,
            # turning the statement into a throwaway one-element tuple.
            subprocess.check_call(['git', 'rm', f.path])
        elif op == utils.DiffOperations.ADD:
            # Create directory recursively if it does not exist.
            os.makedirs(os.path.dirname(f.path), exist_ok=True)
            # Read the file via `git cat-file` with the blob object id to
            # avoid a heavy git checkout.
            with open(f.path, 'wb') as outfile:
                subprocess.check_call(['git', 'cat-file', 'blob', f.id],
                                      stdout=outfile)
            # Add to git index.
            subprocess.check_call(['git', 'add', f.path])