def _squash(stack, iw, name, msg, save_template, patches, no_verify=False):
    """Squash ``patches`` into a single replacement patch on ``stack``.

    A StackTransaction is built that deletes the constituent patches and
    records one new patch whose commit data comes from ``_squash_patches``.
    Returns the result of ``trans.run(iw)``, or ``None`` when only a
    message template was saved (SaveTemplateDone).

    Raises CmdException if the user-supplied ``name`` collides with an
    existing patch that is not itself being squashed.
    """

    # If a name was supplied on the command line, make sure it's OK.
    def bad_name(pn):
        # A name is bad if it collides with an existing patch that is not
        # one of the patches being squashed away (those will be deleted).
        return pn not in patches and stack.patches.exists(pn)

    def get_name(cd):
        # Prefer the user-supplied name; otherwise derive one from the
        # squashed commit's message, avoiding collisions via bad_name.
        return name or utils.make_patch_name(cd.message, bad_name)

    if name and bad_name(name):
        raise CmdException('Patch name "%s" already taken' % name)

    def make_squashed_patch(trans, new_commit_data):
        # Register the squashed commit as a new patch, initially placed at
        # the front of the unapplied list (pushed later if needed).
        name = get_name(new_commit_data)
        trans.patches[name] = stack.repository.commit(new_commit_data)
        trans.unapplied.insert(0, name)

    trans = StackTransaction(stack, 'squash', allow_conflicts=True)
    # Remember whether any patch being squashed is currently applied, so we
    # know to push the replacement patch back at the end.
    push_new_patch = bool(set(patches) & set(trans.applied))
    try:
        new_commit_data = _squash_patches(trans, patches, msg, save_template, no_verify)
        if new_commit_data:
            # We were able to construct the squashed commit
            # automatically. So just delete its constituent patches.
            to_push = trans.delete_patches(lambda pn: pn in patches)
        else:
            # Automatic construction failed. So push the patches
            # consecutively, so that a second construction attempt is
            # guaranteed to work.
            to_push = trans.pop_patches(lambda pn: pn in patches)
            for pn in patches:
                trans.push_patch(pn, iw)
            new_commit_data = _squash_patches(
                trans, patches, msg, save_template, no_verify
            )
            popped_extra = trans.delete_patches(lambda pn: pn in patches)
            assert not popped_extra
        make_squashed_patch(trans, new_commit_data)

        # Push the new patch if necessary, and any unrelated patches we've
        # had to pop out of the way.
        if push_new_patch:
            trans.push_patch(get_name(new_commit_data), iw)
        for pn in to_push:
            trans.push_patch(pn, iw)
    except SaveTemplateDone:
        # Only the message template was written; undo the transaction.
        trans.abort(iw)
        return
    except TransactionHalted:
        # A push stopped partway (presumably a conflict — semantics live in
        # the transaction module); commit whatever progress was made.
        pass
    return trans.run(iw)
def func(parser, options, args):
    """Synchronise a range of patches

    Patches are synchronised either against the same-named patches of a
    reference branch (``--ref-branch``) or against a series file
    (``--series``); exactly one of the two must be given.  Returns the
    result of running the final StackTransaction.
    """
    repository = directory.repository
    stack = repository.get_stack()

    if options.ref_branch:
        # Synchronise against the applied patches of another stack.
        remote_stack = repository.get_stack(options.ref_branch)
        if remote_stack.name == stack.name:
            raise CmdException('Cannot synchronise with the current branch')
        remote_patches = remote_stack.patchorder.applied

        def merge_patch(commit, pname):
            return __branch_merge_patch(remote_stack, stack, commit, pname)

    elif options.series:
        # Synchronise against a series file: one patch name per line,
        # '#' starts a comment, blank lines are ignored.
        patchdir = os.path.dirname(options.series)
        remote_patches = []
        with open(options.series) as f:
            for line in f:
                pn = re.sub('#.*$', '', line).strip()
                if not pn:
                    continue
                remote_patches.append(pn)

        def merge_patch(commit, pname):
            return __series_merge_patch(patchdir, stack, commit, pname)

    else:
        raise CmdException('No remote branch or series specified')

    applied = list(stack.patchorder.applied)
    unapplied = list(stack.patchorder.unapplied)

    # Select the local patches to consider: all applied, an explicit
    # range from the command line, or just the topmost applied patch.
    if options.all:
        patches = applied
    elif len(args) != 0:
        patches = parse_patches(args, applied + unapplied, len(applied), ordered=True)
    elif applied:
        patches = [applied[-1]]
    else:
        parser.error('no patches applied')

    assert patches

    # only keep the patches to be synchronised
    sync_patches = [p for p in patches if p in remote_patches]
    if not sync_patches:
        raise CmdException('No common patches to be synchronised')

    iw = repository.default_iw

    # pop to the one before the first patch to be synchronised
    first_patch = sync_patches[0]
    if first_patch in applied:
        to_pop = applied[applied.index(first_patch) + 1:]
        if to_pop:
            # Pop in a separate transaction so the main sync transaction
            # starts from the right stack state.
            trans = StackTransaction(stack, 'sync (pop)', check_clean_iw=iw)
            popped_extra = trans.pop_patches(lambda pn: pn in to_pop)
            assert not popped_extra
            retval = trans.run(iw)
            assert not retval
        pushed = [first_patch]
    else:
        to_pop = []
        pushed = []
    # Patches we will have to push during the sync: the ones popped above
    # plus any selected patches that were never applied.
    popped = to_pop + [p for p in patches if p in unapplied]

    trans = StackTransaction(stack, 'sync', check_clean_iw=iw)
    try:
        for p in pushed + popped:
            if p in popped:
                trans.push_patch(p, iw=iw)
            if p not in sync_patches:
                # nothing to synchronise
                continue

            # the actual sync
            out.start('Synchronising "%s"' % p)

            commit = trans.patches[p]

            # the actual merging (either from a branch or an external file)
            tree = merge_patch(commit, p)
            if tree:
                # Merge produced a new tree: rewrite the patch's commit.
                trans.patches[p] = commit.data.set_tree(tree).commit(
                    repository)
                out.done('updated')
            else:
                # No tree returned: patch already in sync, leave untouched.
                out.done()
    except TransactionHalted:
        # A push stopped partway (presumably a conflict); commit whatever
        # progress was made.
        pass
    return trans.run(iw)