def recordfunc(ui, repo, message, match, opts):
    """This is the generic record driver.

    Its job is to interactively filter local changes, and accordingly
    prepare the working directory into a state where the job can be
    delegated to a non-interactive commit command such as 'commit' or
    'qrefresh'.

    After the actual job is done by the non-interactive command, the
    working directory state is restored to the original.  In the end
    we'll record the interesting changes, and everything else will be
    left in place, so the user can continue his work.
    """
    if match.files():
        changes = None
    else:
        changes = repo.status(match=match)[:3]
        modified, added, removed = changes
        match = cmdutil.matchfiles(repo, modified + added + removed)
    diffopts = mdiff.diffopts(git=True, nodates=True)
    chunks = patch.diff(repo, repo.dirstate.parents()[0], match=match,
                        changes=changes, opts=diffopts)
    fp = cStringIO.StringIO()
    fp.write(''.join(chunks))
    fp.seek(0)

    # 1. filter patch, so we have intending-to apply subset of it
    if changes is not None:
        chunks = filterpatch(opts, parsepatch(changes, fp), chunkselector)
    else:
        chgs = repo.status(match=match)[:3]
        chunks = filterpatch(opts, parsepatch(chgs, fp), chunkselector)
    del fp

    contenders = {}
    for h in chunks:
        try:
            contenders.update(dict.fromkeys(h.files()))
        except AttributeError:
            pass

    newfiles = [f for f in match.files() if f in contenders]
    if not newfiles:
        ui.status(_('no changes to record\n'))
        return 0

    if changes is None:
        match = cmdutil.matchfiles(repo, newfiles)
        changes = repo.status(match=match)
    modified = dict.fromkeys(changes[0])

    # 2. backup changed files, so we can restore them in the end
    backups = {}
    backupdir = repo.join('record-backups')
    try:
        os.mkdir(backupdir)
    except OSError, err:
        if err.errno != errno.EEXIST:
            raise
def generate_text_diffs(self, row):
    wfile = self.filemodel[row][FM_PATH]
    pfile = util.pconvert(wfile)
    lines = chunks.check_max_diff(self.get_ctx(), pfile)
    if lines:
        return self.diff_highlight_buffer(lines)
    matcher = cmdutil.matchfiles(self.repo, [pfile])
    opts = patch.diffopts(self.ui, self.opts)
    opts.git = True
    difftext = []
    if self.is_merge():
        wctx = self.repo[None]
        pctx1, pctx2 = wctx.parents()
        difftext = [_('===== Diff to first parent %d:%s =====\n') % (
                    pctx1.rev(), str(pctx1))]
        try:
            for s in patch.diff(self.repo, pctx1.node(), None,
                                match=matcher, opts=opts):
                difftext.extend(s.splitlines(True))
            difftext.append(_('\n===== Diff to second parent %d:%s =====\n') % (
                            pctx2.rev(), str(pctx2)))
            for s in patch.diff(self.repo, pctx2.node(), None,
                                match=matcher, opts=opts):
                difftext.extend(s.splitlines(True))
        except (IOError, error.RepoError, error.LookupError, util.Abort), e:
            self.stbar.set_text(str(e))
def _check_changed(self):
    """Test if there is an uncommitted merge, or if .hgtags has changed
    when global tags are used."""
    if self.repo.dirstate.parents()[1] != node.nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    if self.opts.get('upload') and self.useGlobal and not self.opts.get('only'):
        status = self.repo.status('.', None,
                                  cmdutil.matchfiles(self.repo, ['.hgtags']))
        if max(status):
            raise util.Abort(_('outstanding uncommitted .hgtags'))
def changedlines(ui, repo, ctx1, ctx2, fns):
    lines = 0
    fmatch = cmdutil.matchfiles(repo, fns)
    diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
    for l in diff.split('\n'):
        if (l.startswith("+") and not l.startswith("+++ ") or
            l.startswith("-") and not l.startswith("--- ")):
            lines += 1
    return lines
def changedlines(ui, repo, ctx1, ctx2, fns):
    added, removed = 0, 0
    fmatch = cmdutil.matchfiles(repo, fns)
    diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
    for l in diff.split('\n'):
        if l.startswith("+") and not l.startswith("+++ "):
            added += 1
        elif l.startswith("-") and not l.startswith("--- "):
            removed += 1
    return (added, removed)
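# Hedged usage sketch for the (added, removed) variant of changedlines() above:
# summarising one changeset against its first parent. The helper name and the
# use of the 'tip' revision are illustrative assumptions, not from the original.
def _changedlines_example(ui, repo):
    ctx2 = repo['tip']                # hypothetical: any changectx works
    ctx1 = ctx2.parents()[0]
    added, removed = changedlines(ui, repo, ctx1, ctx2, ctx2.files())
    ui.write('+%d / -%d lines changed\n' % (added, removed))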
def read_file_chunks(self, wfile):
    'Get diffs of working file, parse into (c)hunks'
    difftext = cStringIO.StringIO()
    pfile = util.pconvert(wfile)
    lines = check_max_diff(self.stat.get_ctx(), pfile)
    if lines:
        difftext.writelines(lines)
        difftext.seek(0)
    else:
        matcher = cmdutil.matchfiles(self.stat.repo, [pfile])
        diffopts = mdiff.diffopts(git=True, nodates=True)
        try:
            node1, node2 = self.stat.nodes()
            for s in patch.diff(self.stat.repo, node1, node2,
                                match=matcher, opts=diffopts):
                difftext.writelines(s.splitlines(True))
        except (IOError, error.RepoError, error.LookupError, util.Abort), e:
            self.stat.stbar.set_text(str(e))
        difftext.seek(0)
def checkChangeCtxDiff(ui, repo, changecontexts, testFunc, testDesc, fileEndings):
    '''Loop through each diff for each change and run the testFunc against
    each line.'''
    ui.debug('Checking %s\n' % testDesc)
    for ctx in changecontexts:
        # Get the diff for each change and file
        for file in [f for f in ctx.files() if f.endswith(fileEndings)]:
            ui.debug('checking change: %s, file: %s\n'
                     % (short(ctx.node()), file))
            fmatch = cmdutil.matchfiles(repo, [file])
            # diff from this node's parent to the current node
            diff = ''.join(patch.diff(repo, ctx.parents()[0].node(),
                                      ctx.node(), fmatch)).split('\n')
            for i in range(3, len(diff)):  # start checking after the diff header
                line = diff[i]
                if line.startswith('@@'):
                    diffLocation = line
                # only check new lines added/modified in the file
                if line.startswith('+'):
                    ui.debug('\nchecking line for %s: %s\n\n' % (testDesc, line))
                    testResult, errorLocation = testFunc(line)
                    if testResult:
                        ui.warn('\n%s(s) found in %s for rev %s (change %s):\n'
                                % (testDesc, file, ctx.rev(), short(ctx.node())))
                        ui.warn('%s\n' % diffLocation)
                        ui.warn('%s\n' % line)
                        # show a pointer to the error
                        ui.warn('%s^\n' % (' ' * errorLocation,))
                        try:
                            response = ui.promptchoice(
                                '(n)o, (y)es, (a)llow %ss for current file\n' % testDesc +
                                'Are you sure you want to commit this change? [n]: ',
                                ('&No', '&Yes', '&Allow'), 0)
                        except AttributeError:
                            ui.warn('This commit hook requires that you have '
                                    'Mercurial 1.4+ installed. Please upgrade '
                                    'your hg installation.')
                            response = 0
                        if response == 1:
                            # next occurrence in file
                            continue
                        elif response == 2:
                            # next file
                            break
                        else:
                            ui.warn('Aborting commit due to %s.\n' % testDesc)
                            # error = True
                            return True
    return False
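# Hedged sketch of a pretxncommit-style wrapper around checkChangeCtxDiff()
# above. The hook name, the tab-checking test function, its description, and
# the '.py' file ending are illustrative assumptions, not part of the original.
def _example_hook(ui, repo, node, **kwargs):
    def _has_tab(line):
        # flag literal tab characters; report the column of the first one
        col = line.find('\t')
        return (col != -1), max(col, 0)
    # returning True from checkChangeCtxDiff aborts the commit, which is also
    # the failure convention for pretxncommit hooks
    return checkChangeCtxDiff(ui, repo, [repo[node]], _has_tab,
                              'tab character', ('.py',))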
def matcher(self, pats=None, opts=None, files=None):
    '''Return a match object suitable for Mercurial based on the specified
    criteria.

    If files is specified, it is a list of pathnames relative to the
    repository root to be matched precisely.  If pats and/or opts are
    specified, they are passed through to cmdutil.match.'''
    of_patterns = pats is not None or opts is not None
    of_files = files is not None
    opts = opts or {}  # must be a dict
    assert not (of_patterns and of_files)
    if of_patterns:
        return cmdutil.match(self.repo, pats, opts)
    elif of_files:
        return cmdutil.matchfiles(self.repo, files)
    else:
        return cmdutil.matchall(self.repo)
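# Hedged usage sketch for matcher() above. The wrapper object and file names
# are hypothetical; only the three call shapes come from the docstring:
# exact files, pats/opts forwarded to cmdutil.match, or match-all.
def _matcher_usage_example(wrapper):
    m_files = wrapper.matcher(files=['setup.py', 'README'])   # precise paths
    m_pats = wrapper.matcher(pats=['glob:*.py'], opts={})     # pattern form
    m_all = wrapper.matcher()                                  # match everything
    return m_files, m_pats, m_all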
def shelvefunc(ui, repo, message, match, opts):
    files = []
    if match.files():
        changes = None
    else:
        changes = repo.status(match=match)[:3]
        modified, added, removed = changes
        files = modified + added + removed
        match = cmdutil.matchfiles(repo, files)
    diffopts = repo.attic.diffopts({'git': True, 'nodates': True})
    chunks = patch.diff(repo, repo.dirstate.parents()[0], match=match,
                        changes=changes, opts=diffopts)
    fp = cStringIO.StringIO()
    fp.write(''.join(chunks))
    fp.seek(0)

    # 1. filter patch, so we have intending-to apply subset of it
    ac = record.parsepatch(fp)
    chunks = record.filterpatch(ui, ac)
    # and a not-intending-to apply subset of it
    rc = refilterpatch(ac, chunks)
    del fp

    contenders = {}
    for h in chunks:
        try:
            contenders.update(dict.fromkeys(h.files()))
        except AttributeError:
            pass

    newfiles = [f for f in files if f in contenders]
    if not newfiles:
        ui.status(_('no changes to shelve\n'))
        return 0

    modified = dict.fromkeys(changes[0])

    # 2. backup changed files, so we can restore them in the end
    backups = {}
    backupdir = repo.join('shelve-backups')
    try:
        bkfiles = [f for f in newfiles if f in modified]
        backups = makebackup(ui, repo, backupdir, bkfiles)

        # patch to shelve
        sp = cStringIO.StringIO()
        for c in chunks:
            if c.filename() in backups:
                c.write(sp)
        doshelve = sp.tell()
        sp.seek(0)

        # patch to apply to shelved files
        fp = cStringIO.StringIO()
        for c in rc:
            if c.filename() in backups:
                c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)

        try:
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)

            # 3b. apply filtered patch to clean repo (apply)
            if dopatch:
                ui.debug(_('applying patch\n'))
                ui.debug(fp.getvalue())
                patch.internalpatch(fp, ui, 1, repo.root)
            del fp

            # 3c. apply filtered patch to clean repo (shelve)
            if doshelve:
                ui.debug(_("saving patch to %s\n") % (name))
                s = repo.attic
                f = s.opener(name, 'w')
                f.write(sp.getvalue())
                del f
                s.currentpatch = name
                s.persiststate()
            del sp
        except:
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug(_('restoring %r to %r\n') % (tmpname, realname))
                    util.copyfile(tmpname, repo.wjoin(realname))
            except OSError:
                pass

        return 0
    finally:
        try:
            for realname, tmpname in backups.iteritems():
                ui.debug(_('removing backup for %r : %r\n') % (realname, tmpname))
                os.unlink(tmpname)
            os.rmdir(backupdir)
        except OSError:
            pass
def matchfilesutil(repo, files):
    # matchfiles moved from cmdutil to scmutil in hg 1.9
    if hasattr(cmdutil, 'matchfiles'):
        return cmdutil.matchfiles(repo, files)
    else:
        return scmutil.matchfiles(repo, files)
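# Hedged usage sketch: matchfilesutil() above keeps a single code path working
# across the hg 1.9 API move of matchfiles() from cmdutil to scmutil. The
# helper name and the .hgtags status query below are illustrative assumptions.
def _matchfilesutil_example(repo):
    m = matchfilesutil(repo, ['.hgtags'])
    return repo.status(match=m)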