def islfilesrepo(repo):
    """Return True if *repo* uses largefiles.

    Either the 'largefiles' requirement is set and at least one standin
    lives in the store, or the largefiles dirstate is non-empty.
    """
    if 'largefiles' in repo.requirements:
        for entry in repo.store.datafiles():
            if shortnameslash in entry[0]:
                return True
    # Fallback: a populated largefiles dirstate also marks the repo.
    return util.any(openlfdirstate(repo.ui, repo, False))
def verify(self, revs, contents=False):
    '''Verify the existence (and, optionally, contents) of every big file revision referenced by every changeset in revs. Return 0 if all is well, non-zero on any errors.'''
    write = self.ui.write
    failed = False

    write(_('searching %d changesets for largefiles\n') % len(revs))
    verified = set()  # set of (filename, filenode) tuples

    for rev in revs:
        cctx = self.repo[rev]
        cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))

        # BUG FIX: the previous code did
        #     failed = util.any(self._verifyfile(...) for standin in cctx)
        # which (a) overwrote 'failed' on every changeset, losing errors
        # detected in earlier revs, and (b) short-circuited on the first
        # failing standin, leaving 'verified' incomplete and suppressing
        # later error reports.  Accumulate over all standins instead.
        for standin in cctx:
            if self._verifyfile(cctx, cset, contents, standin, verified):
                failed = True

    numrevs = len(verified)
    numlfiles = len(set([fname for (fname, fnode) in verified]))
    if contents:
        write(_('verified contents of %d revisions of %d largefiles\n')
              % (numrevs, numlfiles))
    else:
        write(_('verified existence of %d revisions of %d largefiles\n')
              % (numrevs, numlfiles))
    return int(failed)
def verify(self, revs, contents=False):
    '''Verify the existence (and, optionally, contents) of every big file revision referenced by every changeset in revs. Return 0 if all is well, non-zero on any errors.'''
    write = self.ui.write
    failed = False

    write(_('searching %d changesets for largefiles\n') % len(revs))
    verified = set()  # set of (filename, filenode) tuples

    for rev in revs:
        cctx = self.repo[rev]
        cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))

        # BUG FIX: 'failed = util.any(...)' reset the flag for every
        # changeset (an early failure was clobbered by a later clean rev)
        # and short-circuited after the first bad standin, so 'verified'
        # was left incomplete.  Check every standin and accumulate.
        for standin in cctx:
            if self._verifyfile(cctx, cset, contents, standin, verified):
                failed = True

    numrevs = len(verified)
    numlfiles = len(set([fname for (fname, fnode) in verified]))
    if contents:
        write(
            _('verified contents of %d revisions of %d largefiles\n')
            % (numrevs, numlfiles))
    else:
        write(
            _('verified existence of %d revisions of %d largefiles\n')
            % (numrevs, numlfiles))
    return int(failed)
def buildcmdargs(name, *args, **opts):
    r"""Build list of command-line arguments

    >>> buildcmdargs('push', branch='foo')
    ['push', '--branch', 'foo']
    >>> buildcmdargs('graft', r=['0', '1'])
    ['graft', '-r', '0', '-r', '1']
    >>> buildcmdargs('log', no_merges=True, quiet=False, limit=None)
    ['log', '--no-merges']
    >>> buildcmdargs('commit', user='')
    ['commit', '--user', '']

    positional arguments:

    >>> buildcmdargs('add', 'foo', 'bar')
    ['add', 'foo', 'bar']
    >>> buildcmdargs('cat', '-foo', rev='0')
    ['cat', '--rev', '0', '--', '-foo']

    type conversion to string:

    >>> from PyQt4.QtCore import QString
    >>> buildcmdargs('email', r=[0, 1])
    ['email', '-r', '0', '-r', '1']
    >>> buildcmdargs('grep', 'foo', rev=2)
    ['grep', '--rev', '2', 'foo']
    >>> buildcmdargs('tag', u'\xc0', message=u'\xc1')
    ['tag', '--message', u'\xc1', u'\xc0']
    >>> buildcmdargs(QString('tag'), QString(u'\xc0'), message=QString(u'\xc1'))
    [u'tag', '--message', u'\xc1', u'\xc0']
    """
    stringfy = '%s'.__mod__  # (unicode, QString) -> unicode, otherwise -> str

    fullargs = [stringfy(name)]
    for k, v in opts.items():
        if v is None:
            continue  # None means "option not given"

        # single-letter keys become short options, others long options
        if len(k) == 1:
            aname = '-%s' % k
        else:
            aname = '--%s' % k.replace('_', '-')
        if isinstance(v, bool):
            if v:
                fullargs.append(aname)
        elif isinstance(v, list):
            # repeatable option: emit the flag once per value
            for e in v:
                fullargs.append(aname)
                fullargs.append(stringfy(e))
        else:
            fullargs.append(aname)
            fullargs.append(stringfy(v))

    # ROBUSTNESS: materialize explicitly as a list.  The previous
    # 'args = map(stringfy, args)' followed by an any() scan and a later
    # extend() silently breaks if map() ever yields a one-shot iterator.
    args = [stringfy(e) for e in args]
    if any(e.startswith('-') for e in args):
        # protect positional arguments that look like options
        fullargs.append('--')
    fullargs.extend(args)
    return fullargs
def _updateAnnotateOption(self):
    """Re-apply annotate options after a toggle, keeping one enabled."""
    # If the user just unchecked the last active option, re-check the
    # action that emitted the signal so at least one option stays on.
    active = [a for a in self._annoptactions if a.isChecked()]
    if not active:
        self.sender().setChecked(True)
    self._setupLineAnnotation()
    self.fillModel()
    self._saveAnnotateSettings()
def _loadAnnotateSettings(self):
    """Restore the annotate option checkboxes from saved QSettings."""
    settings = QSettings()
    prefix = "Annotate/"
    for action in self._annoptactions:
        state = settings.value(prefix + action.data().toString()).toBool()
        action.setChecked(state)
    # Guarantee that at least one option is active; 'rev' is the default.
    if not any(action.isChecked() for action in self._annoptactions):
        self._annoptactions[-1].setChecked(True)
    self._setupLineAnnotation()
def pathinstatus(path, status, uncleanpaths):
    """Test path is included by the status filter"""
    # Each letter code selected in the filter admits its path set.
    for code, paths in status.iteritems():
        if code in self._statusfilter and path in paths:
            return True
    # 'C' (clean) is implied for any path not known to be unclean.
    if 'C' in self._statusfilter and path not in uncleanpaths:
        return True
    return False
def run(ui, *pats, **opts):
    """Create a GStatus dialog for the given file patterns."""
    # Show clean files only when the user named existing files explicitly.
    showclean = util.any(os.path.isfile(p) for p in pats)
    cmdoptions = dict(
        all=False, clean=showclean, ignored=False, modified=True,
        added=True, removed=True, deleted=True, unknown=True,
        exclude=[], include=[], debug=True, verbose=True, git=False,
        rev=opts.get('rev', []), check=True, subrepo=True)
    return GStatus(ui, None, None, pats, cmdoptions)
def _updateattachmodes(self):
    """Keep the patch-embedding checkboxes mutually consistent."""
    modes = [self._qui.attach_check, self._qui.inline_check]
    bodycheck = self._qui.body_check
    sender = self.sender()
    # --attach and --inline are mutually exclusive: when one of them has
    # just been checked, uncheck the other.
    if sender in modes and sender.isChecked():
        for widget in modes:
            if widget is not sender:
                widget.setChecked(False)
    # Without any attach mode selected, --body is mandatory: lock it on.
    bodycheck.setEnabled(any(w.isChecked() for w in modes))
    if not bodycheck.isEnabled():
        bodycheck.setChecked(True)
def _asconfigliststr(value): r""" >>> _asconfigliststr('foo') 'foo' >>> _asconfigliststr('foo bar') '"foo bar"' >>> _asconfigliststr('foo,bar') '"foo,bar"' >>> _asconfigliststr('foo "bar"') '"foo \\"bar\\""' """ # ui.configlist() uses isspace(), which is locale-dependent if util.any(c.isspace() or c == ',' for c in value): return '"' + value.replace('"', '\\"') + '"' else: return value
def _defaultlanguage(): if os.name != 'nt' or util.any(e in os.environ for e in _localeenvs): return # honor posix-style env var # On Windows, UI language can be determined by GetUserDefaultUILanguage(), # but gettext doesn't take it into account. # Note that locale.getdefaultlocale() uses GetLocaleInfo(), which may be # different from UI language. # # For details, please read "User Interface Language Management": # http://msdn.microsoft.com/en-us/library/dd374098(v=VS.85).aspx try: from ctypes import windll # requires Python>=2.5 langid = windll.kernel32.GetUserDefaultUILanguage() return locale.windows_locale[langid] except (ImportError, AttributeError, KeyError): pass
def getsearchmode(query):
    """Classify *query*, returning (MODE_REVISION|MODE_REVSET|MODE_KEYWORD,
    payload).

    Tries, in order: an exact changeset lookup, then a conservative revset
    evaluation, falling back to keyword search whenever the query does not
    look like a safe revset expression.
    """
    try:
        ctx = web.repo[query]
    except (error.RepoError, error.LookupError):
        # query is not an exact revision pointer, need to
        # decide if it's a revset expression or keywords
        pass
    else:
        return MODE_REVISION, ctx

    # Evaluate through reverse() so matches come out newest-first.
    revdef = 'reverse(%s)' % query
    try:
        tree, pos = revset.parse(revdef)
    except ParseError:
        # can't parse to a revset tree
        return MODE_KEYWORD, query

    if revset.depth(tree) <= 2:
        # no revset syntax used
        return MODE_KEYWORD, query

    # A bare 're:...' string is more likely a keyword pattern than a revset.
    if util.any((token, (value or '')[:3]) == ('string', 're:')
                for token, value, pos in revset.tokenize(revdef)):
        return MODE_KEYWORD, query

    # Only evaluate revsets composed of functions known to be safe.
    funcsused = revset.funcsused(tree)
    if not funcsused.issubset(revset.safesymbols):
        return MODE_KEYWORD, query

    mfunc = revset.match(web.repo.ui, revdef)
    try:
        revs = mfunc(web.repo)
        return MODE_REVSET, revs
        # ParseError: wrongly placed tokens, wrongs arguments, etc
        # RepoLookupError: no such revision, e.g. in 'revision:'
        # Abort: bookmark/tag not exists
        # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo
    except (ParseError, RepoLookupError, Abort, LookupError):
        return MODE_KEYWORD, query
def getsearchmode(query):
    """Classify *query*, returning (MODE_REVISION|MODE_REVSET|MODE_KEYWORD,
    payload).

    Newer variant: the revset matcher is called with an explicit subset
    (all revisions of the repo) as its second argument.
    """
    try:
        ctx = web.repo[query]
    except (error.RepoError, error.LookupError):
        # query is not an exact revision pointer, need to
        # decide if it's a revset expression or keywords
        pass
    else:
        return MODE_REVISION, ctx

    # Evaluate through reverse() so matches come out newest-first.
    revdef = 'reverse(%s)' % query
    try:
        tree, pos = revset.parse(revdef)
    except ParseError:
        # can't parse to a revset tree
        return MODE_KEYWORD, query

    if revset.depth(tree) <= 2:
        # no revset syntax used
        return MODE_KEYWORD, query

    # A bare 're:...' string is more likely a keyword pattern than a revset.
    if util.any((token, (value or '')[:3]) == ('string', 're:')
                for token, value, pos in revset.tokenize(revdef)):
        return MODE_KEYWORD, query

    # Only evaluate revsets composed of functions known to be safe.
    funcsused = revset.funcsused(tree)
    if not funcsused.issubset(revset.safesymbols):
        return MODE_KEYWORD, query

    mfunc = revset.match(web.repo.ui, revdef)
    try:
        revs = mfunc(web.repo, list(web.repo))
        return MODE_REVSET, revs
        # ParseError: wrongly placed tokens, wrongs arguments, etc
        # RepoLookupError: no such revision, e.g. in 'revision:'
        # Abort: bookmark/tag not exists
        # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo
    except (ParseError, RepoLookupError, Abort, LookupError):
        return MODE_KEYWORD, query
# NOTE(review): this chunk was flattened onto a single line and both begins
# and ends mid-definition (it opens inside a command-dispatch method and
# stops at the bare header of _cmd_hg), so it is kept at comment level as a
# readable transcription rather than re-emitted as code:
#
#         # execute the command
#         cmd = args.pop(0)
#         try:
#             self._cmdtable[cmd](self, args)
#         except KeyError:
#             return self._runextcommand(cmdline)
#
#     def _parsecmdline(self, cmdline):
#         """Split command line string to imitate a unix shell"""
#         try:
#             args = shlex.split(hglib.fromunicode(cmdline))
#         except ValueError, e:   # NOTE(review): py2-only; py3 needs "as e"
#             raise ValueError(_('command parse error: %s') % e)
#         for e in args:
#             e = util.expandpath(e)
#             # expand shell-style globs relative to the current repo cwd
#             if util.any(c in e for c in '*?[]'):
#                 expanded = glob.glob(os.path.join(self.cwd, e))
#                 if not expanded:
#                     raise ValueError(
#                         _('no matches found: %s') % hglib.tounicode(e))
#                 for p in expanded:
#                     yield p
#             else:
#                 yield e
#
#     def _runextcommand(self, cmdline):
#         # hand non-builtin commands to the external process
#         self._extproc.setWorkingDirectory(hglib.tounicode(self.cwd))
#         self._extproc.start(cmdline, QIODevice.ReadOnly)
#
#     @_cmdtable
#     def _cmd_hg(self, args):
def _histedit(ui, repo, state, *freeargs, **opts):
    """Core histedit driver: validate options, (re)build *state* for the
    new/continue/abort goal, then run the edit plan to completion."""
    # TODO only abort if we try and histedit mq patches, not just
    # blanket if mq patches are applied somewhere
    mq = getattr(repo, 'mq', None)
    if mq and mq.applied:
        raise util.Abort(_('source has mq patches applied'))

    # basic argument incompatibility processing
    outg = opts.get('outgoing')
    cont = opts.get('continue')
    abort = opts.get('abort')
    force = opts.get('force')
    rules = opts.get('commands', '')
    revs = opts.get('rev', [])
    goal = 'new' # This invocation goal, in new, continue, abort
    if force and not outg:
        raise util.Abort(_('--force only allowed with --outgoing'))
    if cont:
        if util.any((outg, abort, revs, freeargs, rules)):
            raise util.Abort(_('no arguments allowed with --continue'))
        goal = 'continue'
    elif abort:
        if util.any((outg, revs, freeargs, rules)):
            raise util.Abort(_('no arguments allowed with --abort'))
        goal = 'abort'
    else:
        if os.path.exists(os.path.join(repo.path, 'histedit-state')):
            raise util.Abort(_('history edit already in progress, try '
                               '--continue or --abort'))
        if outg:
            if revs:
                raise util.Abort(_('no revisions allowed with --outgoing'))
            if len(freeargs) > 1:
                raise util.Abort(
                    _('only one repo argument allowed with --outgoing'))
        else:
            revs.extend(freeargs)
            if len(revs) != 1:
                raise util.Abort(
                    _('histedit requires exactly one ancestor revision'))

    replacements = []
    keep = opts.get('keep', False)

    # rebuild state
    if goal == 'continue':
        state = histeditstate(repo)
        state.read()
        state = bootstrapcontinue(ui, state, opts)
    elif goal == 'abort':
        state = histeditstate(repo)
        state.read()
        mapping, tmpnodes, leafs, _ntm = processreplacement(state)
        ui.debug('restore wc to old parent %s\n' % node.short(state.topmost))
        # check whether we should update away
        parentnodes = [c.node() for c in repo[None].parents()]
        for n in leafs | set([state.parentctx.node()]):
            if n in parentnodes:
                hg.clean(repo, state.topmost)
                break
        else:
            # no temporary node is checked out: nothing to update away from
            pass
        cleanupnode(ui, repo, 'created', tmpnodes)
        cleanupnode(ui, repo, 'temp', leafs)
        state.clear()
        return
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

        topmost, empty = repo.dirstate.parents()
        if outg:
            if freeargs:
                remote = freeargs[0]
            else:
                remote = None
            root = findoutgoing(ui, repo, remote, force, opts)
        else:
            rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
            if len(rr) != 1:
                raise util.Abort(_('The specified revisions must have '
                                   'exactly one common root'))
            root = rr[0].node()

        revs = between(repo, root, topmost, keep)
        if not revs:
            raise util.Abort(_('%s is not an ancestor of working directory') %
                             node.short(root))

        ctxs = [repo[r] for r in revs]
        if not rules:
            # no --commands file: build a default plan and open the editor
            rules = '\n'.join([makedesc(c) for c in ctxs])
            rules += '\n\n'
            rules += editcomment % (node.short(root), node.short(topmost))
            rules = ui.edit(rules, ui.username())
            # Save edit rules in .hg/histedit-last-edit.txt in case
            # the user needs to ask for help after something
            # surprising happens.
            f = open(repo.join('histedit-last-edit.txt'), 'w')
            f.write(rules)
            f.close()
        else:
            # read the plan from the given file (or stdin for '-')
            if rules == '-':
                f = sys.stdin
            else:
                f = open(rules)
            rules = f.read()
            f.close()
        # drop blank lines and comment lines before validating
        rules = [l for l in (r.strip() for r in rules.splitlines())
                 if l and not l.startswith('#')]
        rules = verifyrules(rules, repo, ctxs)

        parentctx = repo[root].parents()[0]
        state.parentctx = parentctx
        state.rules = rules
        state.keep = keep
        state.topmost = topmost
        state.replacements = replacements

    # Run the plan: persist state before each action so --continue/--abort
    # can pick up after an interruption.
    while state.rules:
        state.write()
        action, ha = state.rules.pop(0)
        ui.debug('histedit: processing %s %s\n' % (action, ha))
        actfunc = actiontable[action]
        state.parentctx, replacement_ = actfunc(ui, state, ha, opts)
        state.replacements.extend(replacement_)

    hg.update(repo, state.parentctx.node())

    mapping, tmpnodes, created, ntm = processreplacement(state)
    if mapping:
        for prec, succs in mapping.iteritems():
            if not succs:
                ui.debug('histedit: %s is dropped\n' % node.short(prec))
            else:
                ui.debug('histedit: %s is replaced by %s\n' % (
                    node.short(prec), node.short(succs[0])))
                if len(succs) > 1:
                    m = 'histedit: %s'
                    for n in succs[1:]:
                        ui.debug(m % node.short(n))

    if not keep:
        if mapping:
            movebookmarks(ui, repo, mapping, state.topmost, ntm)
            # TODO update mq state
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # record replacements as obsolescence markers instead of stripping
            markers = []
            # sort by revision number because it sound "right"
            for prec in sorted(mapping, key=repo.changelog.rev):
                succs = mapping[prec]
                markers.append((repo[prec],
                                tuple(repo[s] for s in succs)))
            if markers:
                obsolete.createmarkers(repo, markers)
        else:
            cleanupnode(ui, repo, 'replaced', mapping)

    cleanupnode(ui, repo, 'temp', tmpnodes)
    state.clear()
    if os.path.exists(repo.sjoin('undo')):
        os.unlink(repo.sjoin('undo'))
def checkrequireslfiles(ui, repo, **kwargs):
    """Add the 'largefiles' requirement once a standin appears in the store."""
    if 'largefiles' in repo.requirements:
        return
    # Standins live under the largefiles short-name prefix in the store.
    prefix = lfutil.shortname + '/'
    if util.any(prefix in f[0] for f in repo.store.datafiles()):
        repo.requirements.add('largefiles')
        repo._writerequirements()
def sign(ui, repo, *revs, **opts):
    """add a signature for the current or given revision

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    See :hg:`help dates` for a list of formats valid for -d/--date.
    """
    mygpg = newgpg(ui, **opts)
    sigver = "0"
    sigmessage = ""

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    if revs:
        nodes = [repo.lookup(n) for n in revs]
    else:
        # default to the working directory parent(s), excluding null
        nodes = [node for node in repo.dirstate.parents()
                 if node != hgnode.nullid]
        if len(nodes) > 1:
            raise util.Abort(_('uncommitted merge - please provide a '
                               'specific revision'))
        if not nodes:
            nodes = [repo.changelog.tip()]

    for n in nodes:
        hexnode = hgnode.hex(n)
        ui.write(_("Signing %d:%s\n") % (repo.changelog.rev(n),
                                         hgnode.short(n)))
        # build data
        data = node2txt(repo, n, sigver)
        sig = mygpg.sign(data)
        if not sig:
            raise util.Abort(_("error while signing"))
        sig = binascii.b2a_base64(sig)
        sig = sig.replace("\n", "")
        sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)

    # write it
    if opts['local']:
        repo.opener.append("localsigs", sigmessage)
        return

    # refuse to clobber local .hgsigs edits unless --force
    msigs = match.exact(repo.root, '', ['.hgsigs'])
    s = repo.status(match=msigs, unknown=True, ignored=True)[:6]
    if util.any(s) and not opts["force"]:
        raise util.Abort(_("working copy of .hgsigs is changed "
                           "(please commit .hgsigs manually "
                           "or use --force)"))

    sigsfile = repo.wfile(".hgsigs", "ab")
    sigsfile.write(sigmessage)
    sigsfile.close()

    if '.hgsigs' not in repo.dirstate:
        repo[None].add([".hgsigs"])

    if opts["no_commit"]:
        return

    message = opts['message']
    if not message:
        # we don't translate commit messages
        message = "\n".join(["Added signature for changeset %s"
                             % hgnode.short(n) for n in nodes])
    try:
        repo.commit(message, opts['user'], opts['date'], match=msigs)
    # FIX: 'except ValueError, inst' is Python-2-only syntax; the 'as'
    # form is equivalent and supported since Python 2.6.
    except ValueError as inst:
        raise util.Abort(str(inst))
def sign(ui, repo, *revs, **opts):
    """add a signature for the current or given revision

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    See :hg:`help dates` for a list of formats valid for -d/--date.
    """
    mygpg = newgpg(ui, **opts)
    sigver = "0"
    sigmessage = ""

    date = opts.get("date")
    if date:
        opts["date"] = util.parsedate(date)

    if revs:
        nodes = [repo.lookup(n) for n in revs]
    else:
        # default to the working directory parent(s), excluding null
        nodes = [node for node in repo.dirstate.parents()
                 if node != hgnode.nullid]
        if len(nodes) > 1:
            raise util.Abort(_("uncommitted merge - please provide a "
                               "specific revision"))
        if not nodes:
            nodes = [repo.changelog.tip()]

    for n in nodes:
        hexnode = hgnode.hex(n)
        ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
                                         hgnode.short(n)))
        # build data
        data = node2txt(repo, n, sigver)
        sig = mygpg.sign(data)
        if not sig:
            raise util.Abort(_("error while signing"))
        sig = binascii.b2a_base64(sig)
        sig = sig.replace("\n", "")
        sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)

    # write it
    if opts["local"]:
        repo.vfs.append("localsigs", sigmessage)
        return

    # BUG FIX: 'msigs' was previously assigned only inside the
    # 'if not opts["force"]' branch but is used unconditionally by
    # repo.commit() below, so running with --force crashed with a
    # NameError.  Define it unconditionally.
    msigs = match.exact(repo.root, "", [".hgsigs"])
    if not opts["force"]:
        if util.any(repo.status(match=msigs, unknown=True, ignored=True)):
            raise util.Abort(_("working copy of .hgsigs is changed "),
                             hint=_("please commit .hgsigs manually"))

    sigsfile = repo.wfile(".hgsigs", "ab")
    sigsfile.write(sigmessage)
    sigsfile.close()

    if ".hgsigs" not in repo.dirstate:
        repo[None].add([".hgsigs"])

    if opts["no_commit"]:
        return

    message = opts["message"]
    if not message:
        # we don't translate commit messages
        message = "\n".join(["Added signature for changeset %s"
                             % hgnode.short(n) for n in nodes])
    try:
        editor = cmdutil.getcommiteditor(editform="gpg.sign", **opts)
        repo.commit(message, opts["user"], opts["date"], match=msigs,
                    editor=editor)
    # FIX: 'except ValueError, inst' is Python-2-only syntax; the 'as'
    # form is equivalent and supported since Python 2.6.
    except ValueError as inst:
        raise util.Abort(str(inst))
def histedit(ui, repo, *freeargs, **opts):
    """interactively edit changeset history

    This command edits changesets between ANCESTOR and the parent of
    the working directory.

    With --outgoing, this edits changesets not found in the destination
    repository. If URL of the destination is omitted, the 'default-push'
    (or 'default') path will be used.

    For safety, this command is aborted, also if there are ambiguous
    outgoing revisions which may confuse users: for example, there are
    multiple branches containing outgoing revisions.

    Use "min(outgoing() and ::.)" or similar revset specification instead
    of --outgoing to specify edit target revision exactly in such
    ambiguous situation. See :hg:`help revsets` for detail about selecting
    revisions.
    """
    # TODO only abort if we try and histedit mq patches, not just
    # blanket if mq patches are applied somewhere
    mq = getattr(repo, "mq", None)
    if mq and mq.applied:
        raise util.Abort(_("source has mq patches applied"))

    # basic argument incompatibility processing
    outg = opts.get("outgoing")
    cont = opts.get("continue")
    abort = opts.get("abort")
    force = opts.get("force")
    rules = opts.get("commands", "")
    revs = opts.get("rev", [])
    goal = "new"  # This invocation goal, in new, continue, abort
    if force and not outg:
        raise util.Abort(_("--force only allowed with --outgoing"))
    if cont:
        if util.any((outg, abort, revs, freeargs, rules)):
            raise util.Abort(_("no arguments allowed with --continue"))
        goal = "continue"
    elif abort:
        if util.any((outg, revs, freeargs, rules)):
            raise util.Abort(_("no arguments allowed with --abort"))
        goal = "abort"
    else:
        if os.path.exists(os.path.join(repo.path, "histedit-state")):
            raise util.Abort(_("history edit already in progress, try "
                               "--continue or --abort"))
        if outg:
            if revs:
                raise util.Abort(_("no revisions allowed with --outgoing"))
            if len(freeargs) > 1:
                raise util.Abort(_("only one repo argument allowed with --outgoing"))
        else:
            revs.extend(freeargs)
            if len(revs) != 1:
                raise util.Abort(_("histedit requires exactly one ancestor revision"))

    if goal == "continue":
        # resume from persisted state
        (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
        parentctx = repo[parentctxnode]
        parentctx, repl = bootstrapcontinue(ui, repo, parentctx, rules, opts)
        replacements.extend(repl)
    elif goal == "abort":
        # undo everything created so far and clean the state file
        (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
        mapping, tmpnodes, leafs, _ntm = processreplacement(repo, replacements)
        ui.debug("restore wc to old parent %s\n" % node.short(topmost))
        # check whether we should update away
        parentnodes = [c.node() for c in repo[None].parents()]
        for n in leafs | set([parentctxnode]):
            if n in parentnodes:
                hg.clean(repo, topmost)
                break
        else:
            # no temporary node is checked out: nothing to update away from
            pass
        cleanupnode(ui, repo, "created", tmpnodes)
        cleanupnode(ui, repo, "temp", leafs)
        os.unlink(os.path.join(repo.path, "histedit-state"))
        return
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

        topmost, empty = repo.dirstate.parents()
        if outg:
            if freeargs:
                remote = freeargs[0]
            else:
                remote = None
            root = findoutgoing(ui, repo, remote, force, opts)
        else:
            root = revs[0]
            root = scmutil.revsingle(repo, root).node()

        keep = opts.get("keep", False)
        revs = between(repo, root, topmost, keep)
        if not revs:
            raise util.Abort(_("%s is not an ancestor of working directory") %
                             node.short(root))

        ctxs = [repo[r] for r in revs]
        if not rules:
            # no --commands file: build a default plan and open the editor
            rules = "\n".join([makedesc(c) for c in ctxs])
            rules += "\n\n"
            rules += editcomment % (node.short(root), node.short(topmost))
            rules = ui.edit(rules, ui.username())
            # Save edit rules in .hg/histedit-last-edit.txt in case
            # the user needs to ask for help after something
            # surprising happens.
            f = open(repo.join("histedit-last-edit.txt"), "w")
            f.write(rules)
            f.close()
        else:
            # read the plan from the given file (or stdin for '-')
            if rules == "-":
                f = sys.stdin
            else:
                f = open(rules)
            rules = f.read()
            f.close()
        # drop blank lines and comment lines before validating
        rules = [l for l in (r.strip() for r in rules.splitlines())
                 if l and not l[0] == "#"]
        rules = verifyrules(rules, repo, ctxs)

        parentctx = repo[root].parents()[0]
        keep = opts.get("keep", False)
        replacements = []

    # Run the plan: persist state before each action so --continue/--abort
    # can pick up after an interruption.
    while rules:
        writestate(repo, parentctx.node(), rules, keep, topmost, replacements)
        action, ha = rules.pop(0)
        ui.debug("histedit: processing %s %s\n" % (action, ha))
        actfunc = actiontable[action]
        parentctx, replacement_ = actfunc(ui, repo, parentctx, ha, opts)
        replacements.extend(replacement_)

    hg.update(repo, parentctx.node())

    mapping, tmpnodes, created, ntm = processreplacement(repo, replacements)
    if mapping:
        for prec, succs in mapping.iteritems():
            if not succs:
                ui.debug("histedit: %s is dropped\n" % node.short(prec))
            else:
                ui.debug("histedit: %s is replaced by %s\n" % (
                    node.short(prec), node.short(succs[0])))
                if len(succs) > 1:
                    m = "histedit: %s"
                    for n in succs[1:]:
                        ui.debug(m % node.short(n))

    if not keep:
        if mapping:
            movebookmarks(ui, repo, mapping, topmost, ntm)
            # TODO update mq state
        if obsolete._enabled:
            # record replacements as obsolescence markers instead of stripping
            markers = []
            # sort by revision number because it sound "right"
            for prec in sorted(mapping, key=repo.changelog.rev):
                succs = mapping[prec]
                markers.append((repo[prec],
                                tuple(repo[s] for s in succs)))
            if markers:
                obsolete.createmarkers(repo, markers)
        else:
            cleanupnode(ui, repo, "replaced", mapping)

    cleanupnode(ui, repo, "temp", tmpnodes)
    os.unlink(os.path.join(repo.path, "histedit-state"))
    if os.path.exists(repo.sjoin("undo")):
        os.unlink(repo.sjoin("undo"))
def special(self):
    """Return True if any header line matches the 'special' pattern."""
    for line in self.header:
        if self.special_re.match(line):
            return True
    return False
def _isinterceptable(self, event):
    """Return True if this key event should be handled by us."""
    if event.key() in self._keys:
        return True
    # also intercept any registered standard key sequence
    for seq in self._keyseqs:
        if event.matches(seq):
            return True
    return False
def commit(self, text="", user=None, date=None, match=None,
        force=False, editor=False, extra={}):
    """Commit, refreshing largefile standins as needed.

    Wraps the inherited commit() so that standins are brought up to
    date for whichever largefiles the commit will touch, while holding
    the repo wlock.  NOTE(review): the mutable default 'extra={}' is
    kept for interface compatibility; the dict is only passed through,
    never mutated here.
    """
    orig = super(lfiles_repo, self).commit

    wlock = repo.wlock()
    try:
        # Case 0: Rebase or Transplant
        # We have to take the time to pull down the new largefiles now.
        # Otherwise, any largefiles that were modified in the
        # destination changesets get overwritten, either by the rebase
        # or in the first commit after the rebase or transplant.
        # updatelfiles will update the dirstate to mark any pulled
        # largefiles as modified
        if getattr(repo, "_isrebasing", False) or \
                getattr(repo, "_istransplanting", False):
            lfcommands.updatelfiles(repo.ui, repo, filelist=None,
                                    printmessage=False)
            result = orig(text=text, user=user, date=date, match=match,
                          force=force, editor=editor, extra=extra)
            return result
        # Case 1: user calls commit with no specific files or
        # include/exclude patterns: refresh and commit all files that
        # are "dirty".
        if ((match is None) or
            (not match.anypats() and not match.files())):
            # Spend a bit of time here to get a list of files we know
            # are modified so we can compare only against those.
            # It can cost a lot of time (several seconds)
            # otherwise to update all standins if the largefiles are
            # large.
            lfdirstate = lfutil.openlfdirstate(ui, self)
            dirtymatch = match_.always(repo.root, repo.getcwd())
            s = lfdirstate.status(dirtymatch, [], False, False, False)
            modifiedfiles = []
            for i in s:
                modifiedfiles.extend(i)
            lfiles = lfutil.listlfiles(self)
            # this only loops through largefiles that exist (not
            # removed/renamed)
            for lfile in lfiles:
                if lfile in modifiedfiles:
                    if os.path.exists(self.wjoin(
                            lfutil.standin(lfile))):
                        # this handles the case where a rebase is being
                        # performed and the working copy is not updated
                        # yet.
                        if os.path.exists(self.wjoin(lfile)):
                            lfutil.updatestandin(
                                self, lfutil.standin(lfile))
                            lfdirstate.normal(lfile)
            for lfile in lfdirstate:
                if lfile in modifiedfiles:
                    if not os.path.exists(
                            repo.wjoin(lfutil.standin(lfile))):
                        lfdirstate.drop(lfile)

            result = orig(text=text, user=user, date=date, match=match,
                          force=force, editor=editor, extra=extra)
            # This needs to be after commit; otherwise precommit hooks
            # get the wrong status
            lfdirstate.write()
            return result

        for f in match.files():
            if lfutil.isstandin(f):
                raise util.Abort(
                    _('file "%s" is a largefile standin') % f,
                    hint=('commit the largefile itself instead'))

        # Case 2: user calls commit with specified patterns: refresh
        # any matching big files.
        smatcher = lfutil.composestandinmatcher(self, match)
        standins = lfutil.dirstate_walk(self.dirstate, smatcher)

        # No matching big files: get out of the way and pass control to
        # the usual commit() method.
        if not standins:
            return orig(text=text, user=user, date=date, match=match,
                        force=force, editor=editor, extra=extra)

        # Refresh all matching big files. It's possible that the
        # commit will end up failing, in which case the big files will
        # stay refreshed. No harm done: the user modified them and
        # asked to commit them, so sooner or later we're going to
        # refresh the standins. Might as well leave them refreshed.
        lfdirstate = lfutil.openlfdirstate(ui, self)
        for standin in standins:
            lfile = lfutil.splitstandin(standin)
            # FIX: replaced the legacy '<>' operator with the equivalent
            # '!=' ('<>' is deprecated and removed in Python 3).
            if lfdirstate[lfile] != 'r':
                lfutil.updatestandin(self, standin)
                lfdirstate.normal(lfile)
            else:
                lfdirstate.drop(lfile)

        # Cook up a new matcher that only matches regular files or
        # standins corresponding to the big files requested by the
        # user. Have to modify _files to prevent commit() from
        # complaining "not tracked" for big files.
        lfiles = lfutil.listlfiles(repo)
        match = copy.copy(match)
        orig_matchfn = match.matchfn

        # Check both the list of largefiles and the list of
        # standins because if a largefile was removed, it
        # won't be in the list of largefiles at this point
        match._files += sorted(standins)

        actualfiles = []
        for f in match._files:
            fstandin = lfutil.standin(f)

            # ignore known largefiles and standins
            if f in lfiles or fstandin in standins:
                continue

            # append directory separator to avoid collisions
            if not fstandin.endswith(os.sep):
                fstandin += os.sep

            # prevalidate matching standin directories
            if util.any(st for st in match._files
                        if st.startswith(fstandin)):
                continue
            actualfiles.append(f)
        match._files = actualfiles

        def matchfn(f):
            if orig_matchfn(f):
                return f not in lfiles
            else:
                return f in standins

        match.matchfn = matchfn
        result = orig(text=text, user=user, date=date, match=match,
                      force=force, editor=editor, extra=extra)
        # This needs to be after commit; otherwise precommit hooks
        # get the wrong status
        lfdirstate.write()
        return result
    finally:
        wlock.release()
# NOTE(review): this chunk was flattened onto a single line and both begins
# and ends mid-definition (it opens inside a command-dispatch method and
# stops at the bare header of _cmd_hg), so it is kept at comment level as a
# readable transcription rather than re-emitted as code:
#
#         # execute the command
#         cmd = args.pop(0)
#         try:
#             self._cmdtable[cmd](self, args)
#         except KeyError:
#             return self._runextcommand(cmdline)
#
#     def _parsecmdline(self, cmdline):
#         """Split command line string to imitate a unix shell"""
#         try:
#             args = shlex.split(hglib.fromunicode(cmdline))
#         except ValueError, e:   # NOTE(review): py2-only; py3 needs "as e"
#             raise ValueError(_('command parse error: %s') % e)
#         for e in args:
#             e = util.expandpath(e)
#             # expand shell-style globs relative to the current repo cwd
#             if util.any(c in e for c in '*?[]'):
#                 expanded = glob.glob(os.path.join(self.cwd, e))
#                 if not expanded:
#                     raise ValueError(_('no matches found: %s')
#                                      % hglib.tounicode(e))
#                 for p in expanded:
#                     yield p
#             else:
#                 yield e
#
#     def _runextcommand(self, cmdline):
#         # hand non-builtin commands to the external process
#         self._extproc.setWorkingDirectory(hglib.tounicode(self.cwd))
#         self._extproc.start(cmdline, QIODevice.ReadOnly)
#
#     @_cmdtable
#     def _cmd_hg(self, args):
def isactive(start, end, color, line_type, children, rev):
    # A revision is "active" when both it and at least one of its
    # children are part of the displayed revset.
    if rev not in revset:
        return False
    for child in children:
        if child in revset:
            return True
    return False
def islfilesrepo(repo):
    """True if repo has the largefiles requirement and stores a standin."""
    if 'largefiles' not in repo.requirements:
        return False
    marker = shortname + '/'
    return util.any(marker in f[0] for f in repo.store.datafiles())
def _histedit(ui, repo, state, *freeargs, **opts):
    """Core histedit driver: validate options, (re)build *state* for the
    new/continue/edit-plan/abort goal, then run the edit plan."""
    # TODO only abort if we try and histedit mq patches, not just
    # blanket if mq patches are applied somewhere
    mq = getattr(repo, 'mq', None)
    if mq and mq.applied:
        raise util.Abort(_('source has mq patches applied'))

    # basic argument incompatibility processing
    outg = opts.get('outgoing')
    cont = opts.get('continue')
    editplan = opts.get('edit_plan')
    abort = opts.get('abort')
    force = opts.get('force')
    rules = opts.get('commands', '')
    revs = opts.get('rev', [])
    goal = 'new' # This invocation goal, in new, continue, abort
    if force and not outg:
        raise util.Abort(_('--force only allowed with --outgoing'))
    if cont:
        if util.any((outg, abort, revs, freeargs, rules, editplan)):
            raise util.Abort(_('no arguments allowed with --continue'))
        goal = 'continue'
    elif abort:
        if util.any((outg, revs, freeargs, rules, editplan)):
            raise util.Abort(_('no arguments allowed with --abort'))
        goal = 'abort'
    elif editplan:
        if util.any((outg, revs, freeargs)):
            raise util.Abort(_('only --commands argument allowed with '
                               '--edit-plan'))
        goal = 'edit-plan'
    else:
        if os.path.exists(os.path.join(repo.path, 'histedit-state')):
            raise util.Abort(_('history edit already in progress, try '
                               '--continue or --abort'))
        if outg:
            if revs:
                raise util.Abort(_('no revisions allowed with --outgoing'))
            if len(freeargs) > 1:
                raise util.Abort(
                    _('only one repo argument allowed with --outgoing'))
        else:
            revs.extend(freeargs)
            if len(revs) == 0:
                # fall back to the configured default revision, if any
                histeditdefault = ui.config('histedit', 'defaultrev')
                if histeditdefault:
                    revs.append(histeditdefault)
            if len(revs) != 1:
                raise util.Abort(
                    _('histedit requires exactly one ancestor revision'))

    replacements = []
    keep = opts.get('keep', False)

    # rebuild state
    if goal == 'continue':
        state.read()
        state = bootstrapcontinue(ui, state, opts)
    elif goal == 'edit-plan':
        # rewrite the remaining rules of an in-progress histedit
        state.read()
        if not rules:
            comment = editcomment % (state.parentctx,
                                     node.short(state.topmost))
            rules = ruleeditor(repo, ui, state.rules, comment)
        else:
            # read the plan from the given file (or stdin for '-')
            if rules == '-':
                f = sys.stdin
            else:
                f = open(rules)
            rules = f.read()
            f.close()
        # drop blank lines and comment lines before validating
        rules = [l for l in (r.strip() for r in rules.splitlines())
                 if l and not l.startswith('#')]
        rules = verifyrules(rules, repo, [repo[c] for [_a, c] in state.rules])
        state.rules = rules
        state.write()
        return
    elif goal == 'abort':
        state.read()
        mapping, tmpnodes, leafs, _ntm = processreplacement(state)
        ui.debug('restore wc to old parent %s\n' % node.short(state.topmost))

        # Recover our old commits if necessary
        if not state.topmost in repo and state.backupfile:
            backupfile = repo.join(state.backupfile)
            f = hg.openpath(ui, backupfile)
            gen = exchange.readbundle(ui, f, backupfile)
            changegroup.addchangegroup(repo, gen, 'histedit',
                                       'bundle:' + backupfile)
            os.remove(backupfile)

        # check whether we should update away
        parentnodes = [c.node() for c in repo[None].parents()]
        for n in leafs | set([state.parentctxnode]):
            if n in parentnodes:
                hg.clean(repo, state.topmost)
                break
        else:
            # no temporary node is checked out: nothing to update away from
            pass
        cleanupnode(ui, repo, 'created', tmpnodes)
        cleanupnode(ui, repo, 'temp', leafs)
        state.clear()
        return
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

        topmost, empty = repo.dirstate.parents()
        if outg:
            if freeargs:
                remote = freeargs[0]
            else:
                remote = None
            root = findoutgoing(ui, repo, remote, force, opts)
        else:
            rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
            if len(rr) != 1:
                raise util.Abort(_('The specified revisions must have '
                                   'exactly one common root'))
            root = rr[0].node()

        revs = between(repo, root, topmost, keep)
        if not revs:
            raise util.Abort(_('%s is not an ancestor of working directory') %
                             node.short(root))

        ctxs = [repo[r] for r in revs]
        if not rules:
            # no --commands file: seed a 'pick' plan and open the editor
            comment = editcomment % (node.short(root), node.short(topmost))
            rules = ruleeditor(repo, ui, [['pick', c] for c in ctxs], comment)
        else:
            # read the plan from the given file (or stdin for '-')
            if rules == '-':
                f = sys.stdin
            else:
                f = open(rules)
            rules = f.read()
            f.close()
        # drop blank lines and comment lines before validating
        rules = [l for l in (r.strip() for r in rules.splitlines())
                 if l and not l.startswith('#')]
        rules = verifyrules(rules, repo, ctxs)

        parentctxnode = repo[root].parents()[0].node()

        state.parentctxnode = parentctxnode
        state.rules = rules
        state.keep = keep
        state.topmost = topmost
        state.replacements = replacements

        # Create a backup so we can always abort completely.
        backupfile = None
        if not obsolete.isenabled(repo, obsolete.createmarkersopt):
            backupfile = repair._bundle(repo, [parentctxnode], [topmost], root,
                                        'histedit')
        state.backupfile = backupfile

    # Run the plan: persist state before each action so --continue/--abort
    # can pick up after an interruption.
    while state.rules:
        state.write()
        action, ha = state.rules.pop(0)
        ui.debug('histedit: processing %s %s\n' % (action, ha[:12]))
        actobj = actiontable[action].fromrule(state, ha)
        parentctx, replacement_ = actobj.run()
        state.parentctxnode = parentctx.node()
        state.replacements.extend(replacement_)
    state.write()

    hg.update(repo, state.parentctxnode)

    mapping, tmpnodes, created, ntm = processreplacement(state)
    if mapping:
        for prec, succs in mapping.iteritems():
            if not succs:
                ui.debug('histedit: %s is dropped\n' % node.short(prec))
            else:
                ui.debug('histedit: %s is replaced by %s\n' % (
                    node.short(prec), node.short(succs[0])))
                if len(succs) > 1:
                    m = 'histedit: %s'
                    for n in succs[1:]:
                        ui.debug(m % node.short(n))

    if not keep:
        if mapping:
            movebookmarks(ui, repo, mapping, state.topmost, ntm)
            # TODO update mq state
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # record replacements as obsolescence markers instead of stripping
            markers = []
            # sort by revision number because it sound "right"
            for prec in sorted(mapping, key=repo.changelog.rev):
                succs = mapping[prec]
                markers.append((repo[prec],
                                tuple(repo[s] for s in succs)))
            if markers:
                obsolete.createmarkers(repo, markers)
        else:
            cleanupnode(ui, repo, 'replaced', mapping)

    cleanupnode(ui, repo, 'temp', tmpnodes)
    state.clear()
    if os.path.exists(repo.sjoin('undo')):
        os.unlink(repo.sjoin('undo'))
def allhunks(self):
    """Return True when any line of this header matches allhunks_re."""
    for headerline in self.header:
        if self.allhunks_re.match(headerline):
            return True
    return False
def getchanges(self, rev):
    """Return the filtered (files, copies) pair for *rev*.

    Decides whether *rev* survives the file filter.  If not wanted, the
    revision is mapped to its surviving parent and ``self.parentmap[rev]``
    is returned instead (set via ``mark_not_wanted``).  Otherwise the
    commit's parent list is rewritten to the mapped parents and the base
    converter's changes are remapped through ``self.filemapper``.
    """
    parents = self.commits[rev].parents
    if len(parents) > 1:
        self.rebuild()

    # To decide whether we're interested in rev we:
    #
    # - calculate what parents rev will have if it turns out we're
    #   interested in it.  If it's going to have more than 1 parent,
    #   we're interested in it.
    #
    # - otherwise, we'll compare it with the single parent we found.
    #   If any of the files we're interested in is different in the
    #   two revisions, we're interested in rev.

    # A parent p is interesting if its mapped version (self.parentmap[p]):
    # - is not SKIPREV
    # - is still not in the list of parents (we don't want duplicates)
    # - is not an ancestor of the mapped versions of the other parents, or
    #   there is no parent in the same branch as the current revision.
    mparents = []
    knownparents = set()
    branch = self.commits[rev].branch
    hasbranchparent = False
    for i, p1 in enumerate(parents):
        mp1 = self.parentmap[p1]
        if mp1 == SKIPREV or mp1 in knownparents:
            continue
        # True when mp1 is already reachable from another mapped parent.
        isancestor = util.any(p2 for p2 in parents
                              if p1 != p2 and mp1 != self.parentmap[p2]
                              and mp1 in self.wantedancestors[p2])
        if not isancestor and not hasbranchparent and len(parents) > 1:
            # This could be expensive, avoid unnecessary calls.
            if self._cachedcommit(p1).branch == branch:
                hasbranchparent = True
        mparents.append((p1, mp1, i, isancestor))
        knownparents.add(mp1)
    # Discard parents ancestors of other parents if there is a
    # non-ancestor one on the same branch as the current revision.
    if hasbranchparent:
        mparents = [p for p in mparents if not p[3]]
    wp = None  # index of the preferred (working) parent in *parents*
    if mparents:
        wp = max(p[2] for p in mparents)
        mparents = [p[1] for p in mparents]
    elif parents:
        wp = 0

    self.origparents[rev] = parents

    closed = False
    if 'close' in self.commits[rev].extra:
        # A branch closing revision is only useful if one of its
        # parents belong to the branch being closed
        pbranches = [self._cachedcommit(p).branch for p in mparents]
        if branch in pbranches:
            closed = True

    if len(mparents) < 2 and not closed and not self.wanted(rev, wp):
        # We don't want this revision.
        # Update our state and tell the convert process to map this
        # revision to the same revision its parent as mapped to.
        p = None
        if parents:
            p = parents[wp]
        self.mark_not_wanted(rev, p)
        self.convertedorder.append((rev, False, p))
        self._discard(*parents)
        return self.parentmap[rev]

    # We want this revision.
    # Rewrite the parents of the commit object
    self.commits[rev].parents = mparents
    self.mark_wanted(rev, parents)
    self.convertedorder.append((rev, True, None))
    self._discard(*parents)

    # Get the real changes and do the filtering/mapping.  To be
    # able to get the files later on in getfile, we hide the
    # original filename in the rev part of the return value.
    changes, copies = self.base.getchanges(rev)
    files = {}
    for f, r in changes:
        newf = self.filemapper(f)
        # keep the first mapping for a target name unless the name changed
        if newf and (newf != f or newf not in files):
            files[newf] = (f, r)
    files = sorted(files.items())

    ncopies = {}
    for c in copies:
        newc = self.filemapper(c)
        if newc:
            newsource = self.filemapper(copies[c])
            if newsource:
                ncopies[newc] = newsource

    return files, ncopies
def binary(self):
    """Return True when any line of this header starts with 'index '."""
    for headerline in self.header:
        if headerline.startswith('index '):
            return True
    return False
def sign(ui, repo, *revs, **opts):
    """add a signature for the current or given revision

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    # SSH-key based signing authority built from the user configuration
    mygpg = SSHAuthority.from_ui(ui)
    sigver = "0"
    sigmessage = ""

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    if revs:
        nodes = [repo.lookup(n) for n in revs]
    else:
        # default to the working directory parent(s), excluding null
        nodes = [node for node in repo.dirstate.parents()
                 if node != hgnode.nullid]
    if len(nodes) > 1:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    if not nodes:
        nodes = [repo.changelog.tip()]

    for n in nodes:
        hexnode = hgnode.hex(n)
        ui.write(_("Signing %d:%s\n") % (repo.changelog.rev(n),
                                         hgnode.short(n)))
        # build data
        data = node2txt(repo, n, sigver)
        sig = mygpg.sign(data)
        if not sig:
            raise util.Abort(_("Error while signing"))
        # one "hexnode version base64sig" line per signed node
        sig = binascii.b2a_base64(sig)
        sig = sig.replace("\n", "")
        sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)

    # write it
    if opts['local']:
        # --local: keep signatures private to this repository
        repo.opener("localsigs", "ab").write(sigmessage)
        return

    msigs = match.exact(repo.root, '', ['.hgsshsigs'])
    # NOTE(review): [:6] drops the last of the status lists before the
    # any() check — presumably the "clean" files list; confirm against
    # repo.status()'s return tuple.
    s = repo.status(match=msigs, unknown=True, ignored=True)[:6]
    if util.any(s) and not opts["force"]:
        raise util.Abort(_("working copy of .hgsshsigs is changed "
                           "(please commit .hgsshsigs manually "
                           "or use --force)"))

    # append the new signature lines to the tracked signature file
    repo.wfile(".hgsshsigs", "ab").write(sigmessage)

    if '.hgsshsigs' not in repo.dirstate:
        repo.add([".hgsshsigs"])

    if opts["no_commit"]:
        return

    message = opts['message']
    if not message:
        # we don't translate commit messages
        message = "\n".join(["Added signature for changeset %s"
                             % hgnode.short(n) for n in nodes])
    try:
        repo.commit(message, opts['user'], opts['date'], match=msigs)
    except ValueError, inst:
        raise util.Abort(str(inst))
def commit(self, text="", user=None, date=None, match=None,
        force=False, editor=False, extra={}):
    """Wrap commit() so largefile standins are refreshed before committing.

    Interface matches the wrapped repo.commit(); *extra* is passed
    through unmodified.  Two cases are handled: a whole-tree commit
    (refresh every dirty largefile's standin) and a commit of specific
    patterns (refresh only matching largefiles and rewrite the matcher).

    Fix: replaced the deprecated Python 2 ``<>`` operator with ``!=``
    (same semantics; ``<>`` is removed in later Python versions).
    """
    orig = super(lfiles_repo, self).commit

    wlock = repo.wlock()
    try:
        if getattr(repo, "_isrebasing", False):
            # We have to take the time to pull down the new
            # largefiles now. Otherwise if we are rebasing,
            # any largefiles that were modified in the
            # destination changesets get overwritten, either
            # by the rebase or in the first commit after the
            # rebase.
            lfcommands.updatelfiles(repo.ui, repo)
        # Case 1: user calls commit with no specific files or
        # include/exclude patterns: refresh and commit all files that
        # are "dirty".
        if ((match is None) or
            (not match.anypats() and not match.files())):
            # Spend a bit of time here to get a list of files we know
            # are modified so we can compare only against those.
            # It can cost a lot of time (several seconds)
            # otherwise to update all standins if the largefiles are
            # large.
            lfdirstate = lfutil.openlfdirstate(ui, self)
            dirtymatch = match_.always(repo.root, repo.getcwd())
            s = lfdirstate.status(dirtymatch, [], False, False, False)
            modifiedfiles = []
            for i in s:
                modifiedfiles.extend(i)
            lfiles = lfutil.listlfiles(self)
            # this only loops through largefiles that exist (not
            # removed/renamed)
            for lfile in lfiles:
                if lfile in modifiedfiles:
                    if os.path.exists(self.wjoin(lfutil.standin(lfile))):
                        # this handles the case where a rebase is being
                        # performed and the working copy is not updated
                        # yet.
                        if os.path.exists(self.wjoin(lfile)):
                            lfutil.updatestandin(self,
                                lfutil.standin(lfile))
                            lfdirstate.normal(lfile)
            # drop entries whose standin disappeared (largefile removed)
            for lfile in lfdirstate:
                if lfile in modifiedfiles:
                    if not os.path.exists(
                            repo.wjoin(lfutil.standin(lfile))):
                        lfdirstate.drop(lfile)
            lfdirstate.write()

            return orig(text=text, user=user, date=date, match=match,
                        force=force, editor=editor, extra=extra)

        for f in match.files():
            if lfutil.isstandin(f):
                raise util.Abort(
                    _('file "%s" is a largefile standin') % f,
                    hint=('commit the largefile itself instead'))

        # Case 2: user calls commit with specified patterns: refresh
        # any matching big files.
        smatcher = lfutil.composestandinmatcher(self, match)
        standins = lfutil.dirstate_walk(self.dirstate, smatcher)

        # No matching big files: get out of the way and pass control to
        # the usual commit() method.
        if not standins:
            return orig(text=text, user=user, date=date, match=match,
                        force=force, editor=editor, extra=extra)

        # Refresh all matching big files. It's possible that the
        # commit will end up failing, in which case the big files will
        # stay refreshed. No harm done: the user modified them and
        # asked to commit them, so sooner or later we're going to
        # refresh the standins. Might as well leave them refreshed.
        lfdirstate = lfutil.openlfdirstate(ui, self)
        for standin in standins:
            lfile = lfutil.splitstandin(standin)
            if lfdirstate[lfile] != 'r':
                lfutil.updatestandin(self, standin)
                lfdirstate.normal(lfile)
            else:
                lfdirstate.drop(lfile)
        lfdirstate.write()

        # Cook up a new matcher that only matches regular files or
        # standins corresponding to the big files requested by the
        # user. Have to modify _files to prevent commit() from
        # complaining "not tracked" for big files.
        lfiles = lfutil.listlfiles(repo)
        match = copy.copy(match)
        orig_matchfn = match.matchfn

        # Check both the list of largefiles and the list of
        # standins because if a largefile was removed, it
        # won't be in the list of largefiles at this point
        match._files += sorted(standins)

        actualfiles = []
        for f in match._files:
            fstandin = lfutil.standin(f)

            # ignore known largefiles and standins
            if f in lfiles or fstandin in standins:
                continue

            # append directory separator to avoid collisions
            if not fstandin.endswith(os.sep):
                fstandin += os.sep

            # prevalidate matching standin directories
            if util.any(st for st in match._files
                        if st.startswith(fstandin)):
                continue
            actualfiles.append(f)
        match._files = actualfiles

        def matchfn(f):
            # plain files pass through unless they are largefiles;
            # everything else passes only if it is a requested standin
            if orig_matchfn(f):
                return f not in lfiles
            else:
                return f in standins

        match.matchfn = matchfn
        return orig(text=text, user=user, date=date, match=match,
                    force=force, editor=editor, extra=extra)
    finally:
        wlock.release()
# NOTE(review): this chunk starts mid-method — the 'return' and dispatch
# below belong to a command-runner whose 'def' line is outside this view.
return
cmd = args.pop(0)
try:
    self._cmdtable[cmd](self, args)
except KeyError:
    # unknown built-in command: forward the whole line to an external process
    return self._runextcommand(cmdline)

def _parsecmdline(self, cmdline):
    """Split command line string to imitate a unix shell.

    Yields one argument at a time; arguments containing shell glob
    characters are expanded relative to self.cwd.  Raises ValueError on
    unparsable input or when a glob pattern matches nothing.
    """
    try:
        args = shlex.split(hglib.fromunicode(cmdline))
    except ValueError, e:
        raise ValueError(_("command parse error: %s") % e)
    for e in args:
        # expand ~user and environment variables first
        e = util.expandpath(e)
        if util.any(c in e for c in "*?[]"):
            expanded = glob.glob(os.path.join(self.cwd, e))
            if not expanded:
                raise ValueError(_("no matches found: %s")
                                 % hglib.tounicode(e))
            for p in expanded:
                yield p
        else:
            yield e

def _runextcommand(self, cmdline):
    """Start *cmdline* as an external process rooted at self.cwd."""
    self._extproc.setWorkingDirectory(hglib.tounicode(self.cwd))
    self._extproc.start(cmdline, QIODevice.ReadOnly)

@_cmdtable
def _cmd_hg(self, args):
    # close the interactive prompt before running the hg command
    self.closePrompt()
def binary(self):
    """Return True when any line of this header starts with "index "."""
    for headerline in self.header:
        if headerline.startswith("index "):
            return True
    return False
def _histedit(ui, repo, *freeargs, **opts):
    """Core of the histedit command.

    Dispatches on the invocation goal ('new', 'continue' or 'abort'),
    builds or reads the rule list, then applies each rule in order,
    persisting progress to .hg/histedit-state so the edit can be resumed
    or aborted.
    """
    # TODO only abort if we try and histedit mq patches, not just
    # blanket if mq patches are applied somewhere
    mq = getattr(repo, 'mq', None)
    if mq and mq.applied:
        raise util.Abort(_('source has mq patches applied'))

    # basic argument incompatibility processing
    outg = opts.get('outgoing')
    cont = opts.get('continue')
    abort = opts.get('abort')
    force = opts.get('force')
    rules = opts.get('commands', '')
    revs = opts.get('rev', [])
    goal = 'new'  # This invocation goal, in new, continue, abort
    if force and not outg:
        raise util.Abort(_('--force only allowed with --outgoing'))
    if cont:
        if util.any((outg, abort, revs, freeargs, rules)):
            raise util.Abort(_('no arguments allowed with --continue'))
        goal = 'continue'
    elif abort:
        if util.any((outg, revs, freeargs, rules)):
            raise util.Abort(_('no arguments allowed with --abort'))
        goal = 'abort'
    else:
        # refuse to start a new edit while a previous one is unfinished
        if os.path.exists(os.path.join(repo.path, 'histedit-state')):
            raise util.Abort(_('history edit already in progress, try '
                               '--continue or --abort'))
        if outg:
            if revs:
                raise util.Abort(_('no revisions allowed with --outgoing'))
            if len(freeargs) > 1:
                raise util.Abort(
                    _('only one repo argument allowed with --outgoing'))
        else:
            revs.extend(freeargs)
            if len(revs) != 1:
                raise util.Abort(
                    _('histedit requires exactly one ancestor revision'))

    if goal == 'continue':
        # resume from the persisted state and process the pending rule
        (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
        parentctx = repo[parentctxnode]
        parentctx, repl = bootstrapcontinue(ui, repo, parentctx, rules, opts)
        replacements.extend(repl)
    elif goal == 'abort':
        (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
        mapping, tmpnodes, leafs, _ntm = processreplacement(repo, replacements)
        ui.debug('restore wc to old parent %s\n' % node.short(topmost))
        # check whether we should update away
        parentnodes = [c.node() for c in repo[None].parents()]
        for n in leafs | set([parentctxnode]):
            if n in parentnodes:
                hg.clean(repo, topmost)
                break
        else:
            pass  # working directory not on an edited node; nothing to do
        cleanupnode(ui, repo, 'created', tmpnodes)
        cleanupnode(ui, repo, 'temp', leafs)
        os.unlink(os.path.join(repo.path, 'histedit-state'))
        return
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        topmost, empty = repo.dirstate.parents()
        if outg:
            if freeargs:
                remote = freeargs[0]
            else:
                remote = None
            root = findoutgoing(ui, repo, remote, force, opts)
        else:
            rootrevs = list(repo.set('roots(%lr)', revs))
            if len(rootrevs) != 1:
                raise util.Abort(_('The specified revisions must have '
                                   'exactly one common root'))
            root = rootrevs[0].node()

        keep = opts.get('keep', False)
        revs = between(repo, root, topmost, keep)
        if not revs:
            raise util.Abort(_('%s is not an ancestor of working directory') %
                             node.short(root))

        ctxs = [repo[r] for r in revs]
        if not rules:
            # no rules given on the command line: open an editor with a
            # default "pick" line per changeset plus explanatory comment
            rules = '\n'.join([makedesc(c) for c in ctxs])
            rules += '\n\n'
            rules += editcomment % (node.short(root), node.short(topmost))
            rules = ui.edit(rules, ui.username())
            # Save edit rules in .hg/histedit-last-edit.txt in case
            # the user needs to ask for help after something
            # surprising happens.
            f = open(repo.join('histedit-last-edit.txt'), 'w')
            f.write(rules)
            f.close()
        else:
            # rules come from a file, or stdin when '-'
            if rules == '-':
                f = sys.stdin
            else:
                f = open(rules)
            rules = f.read()
            f.close()
        # strip blank lines and comment lines before validation
        rules = [l for l in (r.strip() for r in rules.splitlines())
                 if l and not l[0] == '#']
        rules = verifyrules(rules, repo, ctxs)

        parentctx = repo[root].parents()[0]
        keep = opts.get('keep', False)
        replacements = []

    # apply each rule in order, persisting state before every step so the
    # edit can be continued or aborted if a step fails
    while rules:
        writestate(repo, parentctx.node(), rules, keep, topmost, replacements)
        action, ha = rules.pop(0)
        ui.debug('histedit: processing %s %s\n' % (action, ha))
        actfunc = actiontable[action]
        parentctx, replacement_ = actfunc(ui, repo, parentctx, ha, opts)
        replacements.extend(replacement_)

    hg.update(repo, parentctx.node())

    mapping, tmpnodes, created, ntm = processreplacement(repo, replacements)
    if mapping:
        for prec, succs in mapping.iteritems():
            if not succs:
                ui.debug('histedit: %s is dropped\n' % node.short(prec))
            else:
                ui.debug('histedit: %s is replaced by %s\n' % (
                    node.short(prec), node.short(succs[0])))
                if len(succs) > 1:
                    m = 'histedit: %s'
                    for n in succs[1:]:
                        ui.debug(m % node.short(n))

    if not keep:
        if mapping:
            movebookmarks(ui, repo, mapping, topmost, ntm)
            # TODO update mq state
        if obsolete._enabled:
            # record replacements as obsolescence markers instead of
            # stripping the old changesets
            markers = []
            # sort by revision number because it sound "right"
            for prec in sorted(mapping, key=repo.changelog.rev):
                succs = mapping[prec]
                markers.append((repo[prec],
                                tuple(repo[s] for s in succs)))
            if markers:
                obsolete.createmarkers(repo, markers)
        else:
            cleanupnode(ui, repo, 'replaced', mapping)

    cleanupnode(ui, repo, 'temp', tmpnodes)
    os.unlink(os.path.join(repo.path, 'histedit-state'))
    if os.path.exists(repo.sjoin('undo')):
        os.unlink(repo.sjoin('undo'))