def processFork(self, fromrepo, ctx1, torepo, ctx2, anc_rev):
    """Compute (changed, added, removed, copies) across two forked repos.

    Walks from ``ctx1`` (in ``fromrepo``) to the shared ancestor revision
    ``anc_rev``, then from that ancestor to ``ctx2`` (in ``torepo``), and
    merges the two status results and copy maps into a single view, as if
    the comparison had happened inside one repository.

    Returns a 4-tuple of (sorted changed, sorted added, sorted removed,
    copies dict mapping new name -> old name).
    """
    # piece together changed, removed, added, and copies
    match = None  # keep in sync with match above
    # First leg: ctx1 -> ancestor, inside the 'from' repository.
    ctx3 = fromrepo.changectx(anc_rev)
    first_copies = pathcopies(ctx1, ctx3)
    changed, added, removed = fromrepo.status(ctx1, ctx3, match=match)[:3]
    logging.debug('changed, added, removed, copies:\n %r %r %r %r',
                  changed, added, removed, first_copies)
    # Second leg: ancestor -> ctx2, inside the 'to' repository.
    ctx3 = torepo.changectx(anc_rev)
    more_copies = pathcopies(ctx3, ctx2)
    # Reverse map (old name -> new name) of the second leg's copies.
    more_reverse = dict((v, k) for k, v in more_copies.iteritems())
    more_changed, more_added, more_removed = torepo.status(ctx3, ctx2,
                                                           match=match)[:3]
    logging.debug('more_changed, added, removed, copies:\n %r %r %r %r',
                  more_changed, more_added, more_removed, more_copies)
    copies = _chain(ctx1, ctx2, first_copies, more_copies)  # HG INTERNAL
    # the second step removed a file, strip it from copies, changed
    # if it's in added, strip, otherwise add to removed
    check_manifests = set()  # moved back and forth, check manifests
    for f in more_removed:
        try:
            changed.remove(f)
        except ValueError:
            # Not in the first leg's changed list; nothing to strip.
            pass
        try:
            added.remove(f)
            # this file moved from ctx1 to ct2, adjust copies
            if (f in more_reverse and f in first_copies):
                if more_reverse[f] == first_copies[f]:
                    # file moving back and forth, check manifests below
                    check_manifests.add(first_copies[f])
        except ValueError:
            # Only removed on the second leg: it is a net removal.
            removed.append(f)
    # the second step added a file
    # strip it from removed, or add it to added
    for f in more_added:
        try:
            removed.remove(f)
        except ValueError:
            added.append(f)
    # see if a change was reverted, both changed,
    # manifests in to and from match
    m1 = m2 = None  # manifests loaded lazily, only if needed
    changed = set(changed)
    # only look at more_changed files we didn't add
    more_changed = set(more_changed) - set(added)
    both_changed = (changed & more_changed) | check_manifests
    # changed may be anything changed first, second, or both
    changed |= more_changed | check_manifests
    for tp in both_changed:
        # Map the target path back to its source path via the copy chain.
        fp = copies.get(tp, tp)
        if m1 is None:
            m1 = ctx1.manifest()
            m2 = ctx2.manifest()
        # Identical file nodes at both endpoints => change was reverted.
        if m1[fp] == m2[tp]:
            changed.remove(tp)
    return (sorted(changed), sorted(set(added)), sorted(set(removed)),
            copies)
def lineranges(opts, path, basectxs, fixctx, content2):
    """Returns the set of line ranges that should be fixed in a file

    Of the form [(10, 20), (30, 40)].

    This depends on the given base contexts; we must consider lines that
    have changed versus any of the base contexts, and whether the file has
    been renamed versus any of them.

    Another way to understand this is that we exclude line ranges that are
    common to the file in all base contexts.
    """
    if opts.get('whole'):
        # Return a range containing all lines. Rely on the diff
        # implementation's idea of how many lines are in the file, instead
        # of reimplementing it.
        return difflineranges('', content2)

    collected = []
    for base in basectxs:
        # Track the file back through any rename/copy relative to this base.
        source = copies.pathcopies(base, fixctx).get(path, path)
        old_content = base[source].data() if source in base else ''
        collected.extend(difflineranges(old_content, content2))
    return unionranges(collected)
def copy_commit(repo, ctx, parent, date):
    """Recreate ``ctx`` as a new in-memory commit and return its node.

    The new commit reuses ctx's description, files, user and extra, but
    takes the supplied ``date`` and has ``parent`` plus the repository tip
    (``repo[-1]``) as its two parents. Copy/rename metadata relative to
    ctx's first parent is carried over.
    """
    mf = ctx.manifest()
    copied = copies.pathcopies(ctx.p1(), ctx)

    def _filectxfn(repo, memctx, path):
        # Supply content for every path still present in ctx's manifest.
        # NOTE(review): this uses the old memfilectx signature (no repo/
        # memctx leading args) — presumably matched to the Mercurial
        # version in use; confirm before upgrading.
        if path in mf:
            fctx = ctx[path]
            flags = fctx.flags()
            return context.memfilectx(
                fctx.path(), fctx.data(), islink='l' in flags,
                isexec='x' in flags, copied=copied.get(path))
        # Raising IOError presumably tells memctx the file is absent/
        # removed — verify against the Mercurial version in use.
        raise IOError

    new = context.memctx(
        repo, parents=(parent.node(), repo[-1].node()),
        text=ctx.description(), files=ctx.files(),
        filectxfn=_filectxfn, user=ctx.user(), date=date,
        extra=ctx.extra())
    return repo.commitctx(new)
def contextsAndPaths(self, _from, _to, suggested_repo):
    """Resolve the two revisions and compute the affected paths.

    Returns a list of (path, action) pairs where action is one of
    'changed', 'removed', 'moved', 'copied' or 'added'. Also populates
    self.ctx1/self.ctx2 and the self.moved/self.copied mappings.

    Raises BadRevision when either revision cannot be resolved, or when
    the two (forked) repositories share no common changeset.
    """
    repopath = settings.REPOSITORY_BASE + '/' + suggested_repo
    ui = _ui()
    repo = repository(ui, repopath)
    # Convert the 'from' and 'to' to strings (instead of unicode)
    # in case mercurial needs to look for the key in binary data.
    # This prevents UnicodeWarning messages.
    try:
        self.ctx1, fromrepo, dbfrom = self.contextAndRepo(_from, repo)
    except RepoLookupError:
        raise BadRevision("Unrecognized 'from' parameter")
    try:
        self.ctx2, torepo, dbto = self.contextAndRepo(_to, repo)
    except RepoLookupError:
        raise BadRevision("Unrecognized 'to' parameter")
    if fromrepo == torepo:
        # Same repository: a plain status/pathcopies is sufficient.
        copies = pathcopies(self.ctx1, self.ctx2)
        match = None  # maybe get something from l10n.ini and cmdutil
        changed, added, removed = repo.status(self.ctx1, self.ctx2,
                                              match=match)[:3]
    else:
        # Forked repositories: find a common ancestor for ctx1 and ctx2
        # via the database, then stitch the two legs together.
        try:
            anc_rev = (Changeset.objects
                       .exclude(id=1)  # exclude rev 0000
                       .filter(repositories=dbfrom)
                       .filter(repositories=dbto)
                       .filter(branch=1)
                       .order_by('-pk')
                       .values_list('revision', flat=True))[0]
            # mercurial doesn't like unicode
            anc_rev = str(anc_rev)
        except IndexError:
            raise BadRevision("from and to parameter are not connected")
        changed, added, removed, copies = \
            self.processFork(fromrepo, self.ctx1, torepo, self.ctx2,
                             anc_rev)
    # split up the copies info into those that were renames and those
    # that were copied (source removed => rename).
    self.moved = {}
    self.copied = {}
    for new_name, old_name in copies.items():
        if old_name in removed:
            self.moved[new_name] = old_name
        else:
            self.copied[new_name] = old_name
    paths = ([(f, 'changed') for f in changed]
             + [(f, 'removed') for f in removed
                if f not in self.moved.values()]
             + [(f, (f in self.moved and 'moved')
                 or (f in self.copied and 'copied')
                 or 'added')
                for f in added])
    return paths
def _checkRenamed(self, repo, ctx, pctx, wfile):
    """Append copy/rename origin info to self.flabel; return the old name.

    Returns None when the file has no recorded copy source (it was added).
    """
    matcher = match.exact(repo, '', [wfile])
    origin = copies.pathcopies(pctx, ctx, match=matcher).get(wfile)
    if not origin:
        # No copy source recorded: the file is new.
        self.flabel += _(' <i>(was added)</i>')
        return
    uorigin = hglib.tounicode(origin)
    # If the source path still exists in ctx it was a copy; otherwise
    # the source disappeared, so it was a rename.
    if origin in ctx:
        template = _(' <i>(copied from %s)</i>')
    else:
        template = _(' <i>(renamed from %s)</i>')
    self.flabel += template % uorigin
    return origin
def getbasepaths(repo, opts, workqueue, basectxs):
    """Map (base rev, fix rev, path) to the file's path in the base context."""
    if opts.get(b'whole'):
        # Base paths will never be fetched for line range determination.
        return {}

    result = {}
    for rev, path in workqueue:
        fixctx = repo[rev]
        for base in basectxs[rev]:
            # Follow renames/copies back from the fix context to the base;
            # only record paths that actually exist in the base context.
            srcpath = copies.pathcopies(base, fixctx).get(path, path)
            if srcpath in base:
                result[(base.rev(), fixctx.rev(), path)] = srcpath
    return result
def _commitfiltered(repo, ctx, match, keepcommit):
    """Recommit ctx with changed files not in match. Return the new
    node identifier, or None if nothing changed.
    """
    base = ctx.p1()
    # ctx
    initialfiles = set(ctx.files())
    exclude = set(f for f in initialfiles if match(f))

    # No files matched commit, so nothing excluded
    if not exclude:
        return None

    # Files that remain in the recommitted changeset.
    files = (initialfiles - exclude)
    # return the p1 so that we don't create an obsmarker later
    if not keepcommit:
        return ctx.parents()[0].node()

    # Filter copies: keep copy records only for files we keep.
    copied = copies.pathcopies(base, ctx)
    copied = dict(
        (dst, src) for dst, src in copied.iteritems() if dst in files)

    def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
        # Returning None tells memctx the file is absent in the new commit.
        if path not in contentctx:
            return None
        fctx = contentctx[path]
        mctx = context.memfilectx(repo, memctx, fctx.path(), fctx.data(),
                                  fctx.islink(), fctx.isexec(),
                                  copied=copied.get(path))
        return mctx

    new = context.memctx(repo,
                         parents=[base.node(), node.nullid],
                         text=ctx.description(),
                         files=files,
                         filectxfn=filectxfn,
                         user=ctx.user(),
                         date=ctx.date(),
                         extra=ctx.extra())
    # phase handling: commit the new changeset in the same phase as ctx
    commitphase = ctx.phase()
    overrides = {('phases', 'new-commit'): commitphase}
    with repo.ui.configoverride(overrides, 'uncommit'):
        newid = repo.commitctx(new)
    return newid
def _commitfiltered(repo, ctx, match, keepcommit):
    """Recommit ctx with changed files not in match. Return the new
    node identifier, or None if nothing changed.
    """
    base = ctx.p1()
    # ctx
    initialfiles = set(ctx.files())
    exclude = set(f for f in initialfiles if match(f))

    # No files matched commit, so nothing excluded
    if not exclude:
        return None

    # return the p1 so that we don't create an obsmarker later
    if not keepcommit:
        return ctx.p1().node()

    # Files that remain in the recommitted changeset.
    files = (initialfiles - exclude)
    # Filter copies: keep copy records only for files we keep.
    copied = copiesmod.pathcopies(base, ctx)
    copied = dict(
        (dst, src) for dst, src in copied.iteritems() if dst in files)

    def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
        # Returning None tells memctx the file is absent in the new commit.
        if path not in contentctx:
            return None
        fctx = contentctx[path]
        mctx = context.memfilectx(repo, memctx, fctx.path(), fctx.data(),
                                  fctx.islink(), fctx.isexec(),
                                  copysource=copied.get(path))
        return mctx

    if not files:
        repo.ui.status(_("note: keeping empty commit\n"))

    new = context.memctx(repo,
                         parents=[base.node(), node.nullid],
                         text=ctx.description(),
                         files=files,
                         filectxfn=filectxfn,
                         user=ctx.user(),
                         date=ctx.date(),
                         extra=ctx.extra())
    return repo.commitctx(new)
def _interestingfiles(repo, matcher):
    """Find what files were added or removed in this commit.

    Returns a tuple of two lists: (added, removed). Only files not
    *already* marked as moved are included in the added list.
    """
    stat = repo.status(match=matcher)
    # Copy metadata from '.' to the working directory.
    known_copies = copies.pathcopies(repo[b'.'], repo[None], matcher)
    # Drop additions that already carry copy information.
    fresh_added = [f for f in stat.added if f not in known_copies]
    return fresh_added, stat.removed
def handle_update(past, now):
    """Build totals plus per-file content updates between two contexts.

    Returns None when the matcher rejects the 'now' context entirely.
    """
    m = matcher(now)
    if not m:
        return
    data = {'totals': handle_current(now, m), 'updates': {}}
    stat = past.status(now, match=m)
    # Copy metadata is only needed when something was added.
    renames = pathcopies(past, now) if stat.added else {}
    for fname in stat.modified + stat.added:
        # Modified files (and added files with a copy source) have prior
        # content; genuinely new files compare against the empty string.
        has_past = fname not in stat.added or fname in renames
        if has_past:
            past_content = past[renames.get(fname, fname)].data()
        else:
            past_content = ''
        delta = compare_content(fname, past_content, now[fname].data())
        if delta:
            data['updates'][fname] = delta
    return data
def visualdiff(ui, repo, pats, opts):
    """Launch an external visual diff tool for the requested revisions.

    Returns a FileSelectionDialog when a window is needed (multiple tools,
    copies across multiple files, or forced), None on error/no changes,
    and otherwise launches the tool directly (possibly in a background
    thread) and returns None implicitly.
    """
    revs = opts.get('rev', [])
    change = opts.get('change')
    try:
        ctx1b = None
        if change:
            # --change: diff a single changeset against its parent(s).
            ctx2 = repo[change]
            p = ctx2.parents()
            if len(p) > 1:
                ctx1a, ctx1b = p
            else:
                ctx1a = p[0]
        else:
            # --rev pair (or working dir against parent(s)).
            n1, n2 = scmutil.revpair(repo, revs)
            ctx1a, ctx2 = repo[n1], repo[n2]
            p = ctx2.parents()
            if not revs and len(p) > 1:
                ctx1b = p[1]
    except (error.LookupError, error.RepoError):
        QMessageBox.warning(None,
                            _('Unable to find changeset'),
                            _('You likely need to refresh this application'))
        return None

    pats = scmutil.expandpats(pats)
    m = match.match(repo.root, '', pats, None, None, 'relpath')
    n2 = ctx2.node()
    mod_a, add_a, rem_a = map(set, repo.status(ctx1a.node(), n2, m)[:3])
    if ctx1b:
        # Merge changeset: also gather status versus the second parent.
        mod_b, add_b, rem_b = map(set, repo.status(ctx1b.node(), n2, m)[:3])
        cpy = copies.mergecopies(repo, ctx1a, ctx1b,
                                 ctx1a.ancestor(ctx1b))[0]
    else:
        cpy = copies.pathcopies(ctx1a, ctx2)
        mod_b, add_b, rem_b = set(), set(), set()
    MA = mod_a | add_a | mod_b | add_b
    MAR = MA | rem_a | rem_b
    if not MAR:
        QMessageBox.information(None,
                                _('No file changes'),
                                _('There are no file changes to view'))
        return None

    detectedtools = hglib.difftools(repo.ui)
    if not detectedtools:
        QMessageBox.warning(None,
                            _('No diff tool found'),
                            _('No visual diff tools were detected'))
        return None

    preferred = besttool(repo.ui, detectedtools, opts.get('tool'))

    # Build tool list based on diff-patterns matches
    toollist = set()
    patterns = repo.ui.configitems('diff-patterns')
    patterns = [(p, t) for p, t in patterns if t in detectedtools]
    for path in MAR:
        for pat, tool in patterns:
            mf = match.match(repo.root, '', [pat])
            if mf(path):
                toollist.add(tool)
                break
        else:
            # No pattern matched: fall back to the preferred tool.
            toollist.add(preferred)

    cto = cpy.keys()
    for path in MAR:
        if path in cto:
            hascopies = True
            break
    else:
        hascopies = False
    force = repo.ui.configbool('tortoisehg', 'forcevdiffwin')
    if len(toollist) > 1 or (hascopies and len(MAR) > 1) or force:
        # More than one tool needed, or copies across several files:
        # must use the selection window.
        usewin = True
    else:
        preferred = toollist.pop()
        dirdiff = repo.ui.configbool('merge-tools', preferred + '.dirdiff')
        dir3diff = repo.ui.configbool('merge-tools', preferred + '.dir3diff')
        usewin = repo.ui.configbool('merge-tools', preferred + '.usewin')
        if not usewin and len(MAR) > 1:
            # Multiple files: the tool must support directory diffs.
            if ctx1b is not None:
                usewin = not dir3diff
            else:
                usewin = not dirdiff
    if usewin:
        # Multiple required tools, or tool does not support directory diffs
        sa = [mod_a, add_a, rem_a]
        sb = [mod_b, add_b, rem_b]
        dlg = FileSelectionDialog(repo, pats, ctx1a, sa, ctx1b, sb,
                                  ctx2, cpy)
        return dlg

    # We can directly use the selected tool, without a visual diff window
    diffcmd, diffopts, mergeopts = detectedtools[preferred]

    # Disable 3-way merge if there is only one parent or no tool support
    do3way = False
    if ctx1b:
        if mergeopts:
            do3way = True
            args = mergeopts
        else:
            args = diffopts
            if str(ctx1b.rev()) in revs:
                ctx1a = ctx1b
    else:
        args = diffopts

    def dodiff():
        # Snapshot the relevant contexts into temp dirs and run the tool.
        assert not (hascopies and len(MAR) > 1), \
            'dodiff cannot handle copies when diffing dirs'
        sa = [mod_a, add_a, rem_a]
        sb = [mod_b, add_b, rem_b]
        ctxs = [ctx1a, ctx1b, ctx2]

        # If more than one file, diff on working dir copy.
        copyworkingdir = len(MAR) > 1
        dirs, labels, fns_and_mtimes = snapshotset(repo, ctxs, sa, sb, cpy,
                                                   copyworkingdir)
        dir1a, dir1b, dir2 = dirs
        label1a, label1b, label2 = labels
        fns_and_mtime = fns_and_mtimes[2]

        if len(MAR) > 1 and label2 == '':
            label2 = 'working files'

        def getfile(fname, dir, label):
            # Resolve a snapshotted file; substitute an empty placeholder
            # file when the path does not exist in that snapshot.
            file = os.path.join(qtlib.gettempdir(), dir, fname)
            if os.path.isfile(file):
                return fname+label, file
            nullfile = os.path.join(qtlib.gettempdir(), 'empty')
            fp = open(nullfile, 'w')
            fp.close()
            return (hglib.fromunicode(_nonexistant, 'replace') + label,
                    nullfile)

        # If only one change, diff the files instead of the directories
        # Handle bogus modifies correctly by checking if the files exist
        if len(MAR) == 1:
            file2 = MAR.pop()
            file2local = util.localpath(file2)
            if file2 in cto:
                file1 = util.localpath(cpy[file2])
            else:
                file1 = file2
            label1a, dir1a = getfile(file1, dir1a, label1a)
            if do3way:
                label1b, dir1b = getfile(file1, dir1b, label1b)
            label2, dir2 = getfile(file2local, dir2, label2)
        if do3way:
            label1a += '[local]'
            label1b += '[other]'
            label2 += '[merged]'

        repoagent = repo._pyqtobj  # TODO
        replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b,
                       plabel1=label1a, plabel2=label1b,
                       phash1=str(ctx1a), phash2=str(ctx1b),
                       repo=hglib.fromunicode(repoagent.displayName()),
                       clabel=label2, child=dir2, chash=str(ctx2))
        launchtool(diffcmd, args, replace, True)

        # detect if changes were made to mirrored working files
        for copy_fn, working_fn, mtime in fns_and_mtime:
            try:
                if os.lstat(copy_fn).st_mtime != mtime:
                    ui.debug('file changed while diffing. '
                             'Overwriting: %s (src: %s)\n'
                             % (working_fn, copy_fn))
                    util.copyfile(copy_fn, working_fn)
            except EnvironmentError:
                pass  # Ignore I/O errors or missing files

    def dodiffwrapper():
        try:
            dodiff()
        finally:
            # cleanup happens atexit
            ui.note('cleaning up temp directory\n')

    if opts.get('mainapp'):
        dodiffwrapper()
    else:
        # We are not the main application, so this must be done in a
        # background thread
        thread = threading.Thread(target=dodiffwrapper, name='visualdiff')
        thread.setDaemon(True)
        thread.start()
def collapse(repo, first, last, commitopts, skipprompt=False):
    """collapse the set of revisions from first to last as new one.

    Expected commit options are:
        - message
        - date
        - username
    Commit message is edited in all cases (unless ``skipprompt``).

    This function works in memory. Returns the new node, or None when
    the revision set is empty.
    """
    ctxs = list(repo.set('%d::%d', first, last))
    if not ctxs:
        return None
    base = first.parents()[0]

    # commit a new version of the old changeset, including the update
    # collect all files which might be affected
    files = set()
    for ctx in ctxs:
        files.update(ctx.files())

    # Recompute copies (avoid recording a -> b -> a)
    copied = copies.pathcopies(base, last)

    # prune files which were reverted by the updates
    def samefile(f):
        # True when f ends up byte- and flag-identical to the base,
        # or is absent from both endpoints.
        if f in last.manifest():
            a = last.filectx(f)
            if f in base.manifest():
                b = base.filectx(f)
                return (a.data() == b.data()
                        and a.flags() == b.flags())
            else:
                return False
        else:
            return f not in base.manifest()
    files = [f for f in files if not samefile(f)]

    # commit version of these files as defined by head
    headmf = last.manifest()

    def filectxfn(repo, ctx, path):
        if path in headmf:
            fctx = last[path]
            flags = fctx.flags()
            mctx = context.memfilectx(repo, fctx.path(), fctx.data(),
                                      islink='l' in flags,
                                      isexec='x' in flags,
                                      copied=copied.get(path))
            return mctx
        # None tells memctx the file does not exist in the new commit.
        return None

    if commitopts.get('message'):
        message = commitopts['message']
    else:
        message = first.description()
    user = commitopts.get('user')
    date = commitopts.get('date')
    extra = commitopts.get('extra')

    parents = (first.p1().node(), first.p2().node())
    editor = None
    if not skipprompt:
        editor = cmdutil.getcommiteditor(edit=True,
                                         editform='histedit.fold')
    new = context.memctx(repo,
                         parents=parents,
                         text=message,
                         files=files,
                         filectxfn=filectxfn,
                         user=user,
                         date=date,
                         extra=extra,
                         editor=editor)
    return repo.commitctx(new)
def collapse(repo, first, last, commitopts):
    """collapse the set of revisions from first to last as new one.

    Expected commit options are:
        - message
        - date
        - username
    Commit message is edited in all cases.

    This function works in memory. Returns the new node, or None when
    the revision set is empty.
    """
    ctxs = list(repo.set("%d::%d", first, last))
    if not ctxs:
        return None
    base = first.parents()[0]

    # commit a new version of the old changeset, including the update
    # collect all files which might be affected
    files = set()
    for ctx in ctxs:
        files.update(ctx.files())

    # Recompute copies (avoid recording a -> b -> a)
    copied = copies.pathcopies(base, last)

    # prune files which were reverted by the updates
    def samefile(f):
        # True when f ends up byte- and flag-identical to the base,
        # or is absent from both endpoints.
        if f in last.manifest():
            a = last.filectx(f)
            if f in base.manifest():
                b = base.filectx(f)
                return a.data() == b.data() and a.flags() == b.flags()
            else:
                return False
        else:
            return f not in base.manifest()
    files = [f for f in files if not samefile(f)]

    # commit version of these files as defined by head
    headmf = last.manifest()

    def filectxfn(repo, ctx, path):
        # NOTE(review): old memfilectx signature (no repo arg) and
        # IOError-as-removed convention — tied to an older Mercurial.
        if path in headmf:
            fctx = last[path]
            flags = fctx.flags()
            mctx = context.memfilectx(
                fctx.path(), fctx.data(),
                islink="l" in flags,
                isexec="x" in flags,
                copied=copied.get(path)
            )
            return mctx
        raise IOError()

    if commitopts.get("message"):
        message = commitopts["message"]
    else:
        message = first.description()
    user = commitopts.get("user")
    date = commitopts.get("date")
    extra = commitopts.get("extra")

    parents = (first.p1().node(), first.p2().node())
    new = context.memctx(
        repo, parents=parents, text=message, files=files,
        filectxfn=filectxfn, user=user, date=date, extra=extra
    )
    # Force an interactive message edit before committing.
    new._text = cmdutil.commitforceeditor(repo, new, [])
    return repo.commitctx(new)
def dodiff(ui, repo, cmdline, pats, opts):
    """Do the actual diff.

    Snapshots the old revision (and the new one, unless it is the working
    copy), expands $old/$new/$olabel/$nlabel/$root placeholders in
    ``cmdline`` and runs it via ui.system. Returns 0 when there is nothing
    to diff, 1 otherwise.
    """
    revs = opts.get('rev')
    old, new = scmutil.revpair(repo, revs)
    subrepos = opts.get('subrepos')
    matcher = scmutil.match(new, pats, opts)
    status = old.status(new, matcher, listsubrepos=subrepos)
    copy = copies.pathcopies(old, new, matcher)
    mod, add, rem = map(set, status[:3])
    paths_new = mod | add
    # On the old side we need modified files plus the copy sources.
    paths_old = mod | set(copy.values())
    paths_all = paths_old | paths_new
    if not paths_all:
        return 0

    tmproot = pycompat.mkdtemp(prefix='extdiff2.')
    try:
        # Always make a copy of old
        dir_old = snapshot(ui, repo, paths_old, old.node(), tmproot,
                           subrepos)
        dir_old = os.path.join(tmproot, dir_old)
        label_old = '@%d' % old.rev()

        # If new in not the wc, copy it
        if new.node():
            dir_new = snapshot(ui, repo, paths_new, new.node(), tmproot,
                               subrepos)
            label_new = '@%d' % new.rev()
        else:
            # This lets the diff tool open the changed file(s) directly
            dir_new = ''
            label_new = ''

        # Diff the files instead of the directories
        # Handle bogus modifies correctly by checking if the files exist
        # NOTE(review): label_old/label_new are re-assigned from their own
        # previous value each iteration, so with multiple files the labels
        # accumulate ("b" + "a@5"), and only the *last* iteration's
        # path_old/path_new survive into ``replace`` below. Presumably
        # this path is only ever exercised with a single changed file —
        # confirm intent before relying on multi-file behavior.
        for path in paths_new:
            path = util.localpath(path)
            path_old = os.path.join(dir_old, copy.get(path, path))
            label_old = path + label_old
            #if not os.path.isfile(path_old):
                #path_old = os.devnull
            path_new = os.path.join(repo.root, path)
            if not dir_new:
                path_new = os.path.relpath(path_new)
            label_new = path + label_new

        # Function to quote file/dir names in the argument string.
        replace = {'old': path_old, 'olabel': label_old,
                   'nlabel': label_new, 'new': path_new,
                   'root': repo.root}

        def quote(match):
            pre = match.group(2)
            key = match.group(3)
            return pre + procutil.shellquote(replace[key])

        # Match optionally-quoted $old/$new/$olabel/$nlabel/$root tokens.
        regex = (br"""(['"]?)([^\s'"$]*)"""
                 br'\$(old|new|olabel|nlabel|root)\1')
        if not re.search(regex, cmdline):
            # No placeholders given: append the default pair.
            cmdline2 = cmdline + ' $old $new'
        else:
            cmdline2 = cmdline
        cmdline3 = re.sub(regex, quote, cmdline2)
        ui.write(pycompat.bytestr(cmdline3) + b'\n')
        ui.system(cmdline3, blockedtag='extdiff2')
        return 1
    finally:
        ui.note(_('cleaning up temp directory\n'))
        shutil.rmtree(tmproot)
def collapse(repo, first, last, commitopts):
    """collapse the set of revisions from first to last as new one.

    Expected commit options are:
        - message
        - date
        - username
    Commit message is edited in all cases (unless the 'rollup' option is
    set, in which case no editor is opened).

    This function works in memory. Returns the new node, or None when
    the revision set is empty.
    """
    ctxs = list(repo.set('%d::%d', first, last))
    if not ctxs:
        return None
    base = first.parents()[0]

    # commit a new version of the old changeset, including the update
    # collect all files which might be affected
    files = set()
    for ctx in ctxs:
        files.update(ctx.files())

    # Recompute copies (avoid recording a -> b -> a)
    copied = copies.pathcopies(base, last)

    # prune files which were reverted by the updates
    def samefile(f):
        # True when f ends up byte- and flag-identical to the base,
        # or is absent from both endpoints.
        if f in last.manifest():
            a = last.filectx(f)
            if f in base.manifest():
                b = base.filectx(f)
                return (a.data() == b.data()
                        and a.flags() == b.flags())
            else:
                return False
        else:
            return f not in base.manifest()
    files = [f for f in files if not samefile(f)]

    # commit version of these files as defined by head
    headmf = last.manifest()

    def filectxfn(repo, ctx, path):
        if path in headmf:
            fctx = last[path]
            flags = fctx.flags()
            mctx = context.memfilectx(repo, fctx.path(), fctx.data(),
                                      islink='l' in flags,
                                      isexec='x' in flags,
                                      copied=copied.get(path))
            return mctx
        # None tells memctx the file does not exist in the new commit.
        return None

    if commitopts.get('message'):
        message = commitopts['message']
    else:
        message = first.description()
    user = commitopts.get('user')
    date = commitopts.get('date')
    extra = commitopts.get('extra')

    parents = (first.p1().node(), first.p2().node())
    editor = None
    if not commitopts.get('rollup'):
        editor = cmdutil.getcommiteditor(edit=True,
                                         editform='histedit.fold')
    new = context.memctx(repo,
                         parents=parents,
                         text=message,
                         files=files,
                         filectxfn=filectxfn,
                         user=user,
                         date=date,
                         extra=extra,
                         editor=editor)
    return repo.commitctx(new)
def _amend(orig, ui, repo, old, extra, pats, opts):
    """Wraps amend to collect copytrace data on amend.

    If a file is created in one commit, modified in a subsequent commit,
    and then renamed or copied by amending the original commit, restacking
    the commits that modify the file will fail::

        file modified here    B     B'  restack of B to B' will fail
                              |     :   file created here
                              A --> A'  file renamed in amended commit
                              |    /
                              o --

    This function collects information about copies and renames from amend
    commits, and saves it for use during rebases onto the amend commit.
    This lets rebases onto files that been renamed or copied in an amend
    commit work without conflicts.

    The copytrace information is collected from the working copy and
    stored against the amended commit in a separate dbm file. Later, in
    _domergecopies, it is merged with the rebase copytrace data to
    incorporate renames and copies made during the amend.

    Returns the node produced by the wrapped ``orig`` call.
    """
    # Check if amend copytracing has been disabled.
    if not ui.configbool("copytrace", "enableamendcopytrace"):
        return orig(ui, repo, old, extra, pats, opts)

    # Need to get the amend-copies before calling the command because
    # files from the working copy will be used during the amend.
    wctx = repo[None]

    # Find the amend-copies.
    matcher = scmutil.match(wctx, pats, opts)
    amend_copies = copiesmod.pathcopies(old, wctx, matcher)

    # Finally, invoke the command.
    node = orig(ui, repo, old, extra, pats, opts)
    amended_ctx = repo[node]

    # Store the amend-copies against the amended context.
    if amend_copies:
        path = repo.vfs.join('amendcopytrace')
        try:
            # Open the database, creating it if it doesn't already exist.
            db = anydbm.open(path, 'c')
        except anydbm.error as e:
            # Database locked, can't record these amend-copies.
            ui.log('copytrace', 'Failed to open amendcopytrace db: %s' % e)
            return node

        # Merge in any existing amend copies from any previous amends.
        try:
            orig_data = db.get(old.node(), '{}')
        except anydbm.error as e:
            ui.log('copytrace',
                   'Failed to read key %s from amendcopytrace db: %s'
                   % (old.hex(), e))
            return node

        # Keys/values are base64-encoded (Py2 str codec) inside JSON.
        orig_encoded = json.loads(orig_data)
        orig_amend_copies = dict((k.decode('base64'), v.decode('base64'))
                                 for (k, v) in orig_encoded.iteritems())

        # Copytrace information is not valid if it refers to a file that
        # doesn't exist in a commit.  We need to update or remove entries
        # that refer to files that might have only existed in the previous
        # amend commit.
        #
        # Find chained copies and renames (a -> b -> c) and collapse them
        # to (a -> c).  Delete the entry for b if this was a rename.
        for dst, src in amend_copies.iteritems():
            if src in orig_amend_copies:
                amend_copies[dst] = orig_amend_copies[src]
                if src not in amended_ctx:
                    del orig_amend_copies[src]

        # Copy any left over copies from the previous context.
        for dst, src in orig_amend_copies.iteritems():
            if dst not in amend_copies:
                amend_copies[dst] = src

        # Write out the entry for the new amend commit.
        encoded = dict((k.encode('base64'), v.encode('base64'))
                       for (k, v) in amend_copies.iteritems())
        db[node] = json.dumps(encoded)
        try:
            db.close()
        except Exception as e:
            # Database corruption.  Not much we can do, so just log.
            ui.log('copytrace',
                   'Failed to close amendcopytrace db: %s' % e)

    return node
def diff(request):
    """Django view: render an entity-level diff between two hg revisions.

    Expects 'repo', 'from' and 'to' GET parameters (plus optional
    'title'). Returns 400 for missing parameters, 404 for an unknown
    repository, otherwise renders shipping/diff.html.
    """
    if not request.GET.get('repo'):
        return http.HttpResponseBadRequest("Missing 'repo' parameter")
    reponame = request.GET['repo']
    repopath = settings.REPOSITORY_BASE + '/' + reponame
    try:
        repo_url = Repository.objects.get(name=reponame).url
    except Repository.DoesNotExist:
        raise http.Http404("Repository not found")
    if not request.GET.get('from'):
        return http.HttpResponseBadRequest("Missing 'from' parameter")
    if not request.GET.get('to'):
        return http.HttpResponseBadRequest("Missing 'to' parameter")
    ui = _ui()
    repo = repository(ui, repopath)
    # Convert the 'from' and 'to' to strings (instead of unicode)
    # in case mercurial needs to look for the key in binary data.
    # This prevents UnicodeWarning messages.
    ctx1 = repo.changectx(str(request.GET['from']))
    ctx2 = repo.changectx(str(request.GET['to']))
    copies = pathcopies(ctx1, ctx2)
    match = None  # maybe get something from l10n.ini and cmdutil
    changed, added, removed = repo.status(ctx1, ctx2, match=match)[:3]
    # split up the copies info into those that were renames and those
    # that were copied (source removed => rename).
    moved = {}
    copied = {}
    for new_name, old_name in copies.items():
        if old_name in removed:
            moved[new_name] = old_name
        else:
            copied[new_name] = old_name
    paths = ([(f, 'changed') for f in changed]
             + [(f, 'removed') for f in removed
                if f not in moved.values()]
             + [(f, (f in moved and 'moved')
                 or (f in copied and 'copied')
                 or 'added')
                for f in added])
    diffs = DataTree(dict)
    for path, action in paths:
        lines = []
        try:
            p = getParser(path)
        except UserWarning:
            # No parser for this file type: show it as a plain file entry.
            diffs[path].update({
                'path': path,
                'isFile': True,
                'rev': ((action == 'removed') and request.GET['from']
                        or request.GET['to']),
                'class': action,
                'renamed': moved.get(path),
                'copied': copied.get(path)
            })
            continue
        if action == 'added':
            a_entities = []
            a_map = {}
        else:
            # Read the old content from the rename/copy source if any.
            realpath = (action == 'moved' and moved[path]
                        or action == 'copied' and copied[path]
                        or path)
            data = ctx1.filectx(realpath).data()
            data = _universal_newlines(data)
            try:
                p.readContents(data)
                a_entities, a_map = p.parse()
            except:
                # consider doing something like:
                # logging.warn('Unable to parse %s', path, exc_info=True)
                diffs[path].update({
                    'path': path,
                    'isFile': True,
                    'rev': ((action == 'removed') and request.GET['from']
                            or request.GET['to']),
                    'class': action,
                    'renamed': moved.get(path),
                    'copied': copied.get(path)
                })
                continue
        if action == 'removed':
            c_entities, c_map = [], {}
        else:
            data = ctx2.filectx(path).data()
            data = _universal_newlines(data)
            try:
                p.readContents(data)
                c_entities, c_map = p.parse()
            except:
                # consider doing something like:
                # logging.warn('Unable to parse %s', path, exc_info=True)
                diffs[path].update({
                    'path': path,
                    'isFile': True,
                    'rev': ((action == 'removed') and request.GET['from']
                            or request.GET['to']),
                    'class': action
                })
                continue
        a_list = sorted(a_map.keys())
        c_list = sorted(c_map.keys())
        ar = AddRemove()
        ar.set_left(a_list)
        ar.set_right(c_list)
        # NOTE(review): this loop variable shadows the outer 'action'
        # (file-level action); the outer value is not used again after
        # this point, so behavior is unaffected.
        for action, item_or_pair in ar:
            if action == 'delete':
                lines.append({
                    'class': 'removed',
                    'oldval': [{'value':a_entities[a_map[item_or_pair]].val}],
                    'newval': '',
                    'entity': item_or_pair
                })
            elif action == 'add':
                lines.append({
                    'class': 'added',
                    'oldval': '',
                    'newval': [{'value': c_entities[c_map[item_or_pair]].val}],
                    'entity': item_or_pair
                })
            else:
                # Entity present on both sides: compute a word-level diff.
                oldval = a_entities[a_map[item_or_pair[0]]].val
                newval = c_entities[c_map[item_or_pair[1]]].val
                if oldval == newval:
                    continue
                sm = SequenceMatcher(None, oldval, newval)
                oldhtml = []
                newhtml = []
                for op, o1, o2, n1, n2 in sm.get_opcodes():
                    if o1 != o2:
                        oldhtml.append({'class': op,
                                        'value': oldval[o1:o2]})
                    if n1 != n2:
                        newhtml.append({'class': op,
                                        'value': newval[n1:n2]})
                lines.append({'class': 'changed',
                              'oldval': oldhtml,
                              'newval': newhtml,
                              'entity': item_or_pair[0]})
        container_class = lines and 'file' or 'empty-diff'
        diffs[path].update({'path': path,
                            'class': container_class,
                            'lines': lines,
                            'renamed': moved.get(path),
                            'copied': copied.get(path)
                            })
    diffs = diffs.toJSON().get('children', [])
    return render(request, 'shipping/diff.html', {
                    'given_title': request.GET.get('title', None),
                    'repo': reponame,
                    'repo_url': repo_url,
                    'old_rev': request.GET['from'],
                    'new_rev': request.GET['to'],
                    'diffs': diffs
                  })
def visualdiff(ui, repo, pats, opts):
    """Launch a visual diff for a revision pair or a single changeset.

    Returns a FileSelectionDialog when a picker window is required,
    None on user-visible errors (message box already shown), and
    otherwise runs the configured external diff tool -- either inline
    or on a daemon background thread -- and falls off the end.

    NOTE(review): Python 2 / PyQt4-era TortoiseHg code; relies on
    module-level hglib, qtlib, snapshotset, launchtool, besttool.
    """
    revs = opts.get('rev', [])
    change = opts.get('change')
    try:
        ctx1b = None
        if change:
            # --change: diff one changeset against its parent(s);
            # a merge changeset yields two base contexts (ctx1a, ctx1b).
            ctx2 = repo[change]
            p = ctx2.parents()
            if len(p) > 1:
                ctx1a, ctx1b = p
            else:
                ctx1a = p[0]
        else:
            # --rev pair (revpair supplies defaults when revs is empty)
            n1, n2 = scmutil.revpair(repo, revs)
            ctx1a, ctx2 = repo[n1], repo[n2]
            p = ctx2.parents()
            if not revs and len(p) > 1:
                ctx1b = p[1]
    except (error.LookupError, error.RepoError):
        QMessageBox.warning(None, _('Unable to find changeset'),
                            _('You likely need to refresh this application'))
        return None

    pats = scmutil.expandpats(pats)
    m = match.match(repo.root, '', pats, None, None, 'relpath')
    n2 = ctx2.node()
    # status()[:3] is (modified, added, removed) for each base revision
    mod_a, add_a, rem_a = map(set, repo.status(ctx1a.node(), n2, m)[:3])
    if ctx1b:
        mod_b, add_b, rem_b = map(set, repo.status(ctx1b.node(), n2, m)[:3])
        cpy = copies.mergecopies(repo, ctx1a, ctx1b, ctx1a.ancestor(ctx1b))[0]
    else:
        cpy = copies.pathcopies(ctx1a, ctx2)
        mod_b, add_b, rem_b = set(), set(), set()
    MA = mod_a | add_a | mod_b | add_b  # modified or added vs either parent
    MAR = MA | rem_a | rem_b            # ...plus removed
    if not MAR:
        QMessageBox.information(None, _('No file changes'),
                                _('There are no file changes to view'))
        return None

    detectedtools = hglib.difftools(repo.ui)
    if not detectedtools:
        QMessageBox.warning(None, _('No diff tool found'),
                            _('No visual diff tools were detected'))
        return None
    preferred = besttool(repo.ui, detectedtools, opts.get('tool'))

    # Build tool list based on diff-patterns matches
    toollist = set()
    patterns = repo.ui.configitems('diff-patterns')
    patterns = [(p, t) for p, t in patterns if t in detectedtools]
    for path in MAR:
        for pat, tool in patterns:
            mf = match.match(repo.root, '', [pat])
            if mf(path):
                toollist.add(tool)
                break
        else:
            # no diff-pattern matched this path: use the preferred tool
            toollist.add(preferred)

    # detect whether any changed path is a copy/rename target
    cto = cpy.keys()
    for path in MAR:
        if path in cto:
            hascopies = True
            break
    else:
        hascopies = False
    force = repo.ui.configbool('tortoisehg', 'forcevdiffwin')
    if len(toollist) > 1 or (hascopies and len(MAR) > 1) or force:
        # several tools needed, or copies across multiple files:
        # must fall back to the file-selection window
        usewin = True
    else:
        preferred = toollist.pop()
        dirdiff = repo.ui.configbool('merge-tools', preferred + '.dirdiff')
        dir3diff = repo.ui.configbool('merge-tools', preferred + '.dir3diff')
        usewin = repo.ui.configbool('merge-tools', preferred + '.usewin')
        if not usewin and len(MAR) > 1:
            # multiple files: window needed unless the tool advertises
            # directory (or 3-way directory) diff support
            if ctx1b is not None:
                usewin = not dir3diff
            else:
                usewin = not dirdiff
    if usewin:
        # Multiple required tools, or tool does not support directory diffs
        sa = [mod_a, add_a, rem_a]
        sb = [mod_b, add_b, rem_b]
        dlg = FileSelectionDialog(repo, pats, ctx1a, sa, ctx1b, sb, ctx2, cpy)
        return dlg

    # We can directly use the selected tool, without a visual diff window
    diffcmd, diffopts, mergeopts = detectedtools[preferred]

    # Disable 3-way merge if there is only one parent or no tool support
    do3way = False
    if ctx1b:
        if mergeopts:
            do3way = True
            args = mergeopts
        else:
            args = diffopts
            if str(ctx1b.rev()) in revs:
                # user explicitly named the second parent: diff against it
                ctx1a = ctx1b
    else:
        args = diffopts

    def dodiff():
        # Snapshot the chosen revisions into temp dirs, launch the tool,
        # and copy back any edits the user made to mirrored working files.
        assert not (hascopies and len(MAR) > 1), \
                'dodiff cannot handle copies when diffing dirs'

        sa = [mod_a, add_a, rem_a]
        sb = [mod_b, add_b, rem_b]
        ctxs = [ctx1a, ctx1b, ctx2]

        # If more than one file, diff on working dir copy.
        copyworkingdir = len(MAR) > 1
        dirs, labels, fns_and_mtimes = snapshotset(repo, ctxs, sa, sb, cpy,
                                                   copyworkingdir)
        dir1a, dir1b, dir2 = dirs
        label1a, label1b, label2 = labels
        fns_and_mtime = fns_and_mtimes[2]

        if len(MAR) > 1 and label2 == '':
            label2 = 'working files'

        def getfile(fname, dir, label):
            # Return (label, path) for fname inside the snapshot dir, or an
            # empty placeholder file when the file is absent there.
            # NOTE(review): shadows builtins `dir` and `file` (py2 code).
            file = os.path.join(qtlib.gettempdir(), dir, fname)
            if os.path.isfile(file):
                return fname + label, file
            nullfile = os.path.join(qtlib.gettempdir(), 'empty')
            fp = open(nullfile, 'w')
            fp.close()
            return (hglib.fromunicode(_nonexistant, 'replace') + label,
                    nullfile)

        # If only one change, diff the files instead of the directories
        # Handle bogus modifies correctly by checking if the files exist
        if len(MAR) == 1:
            file2 = MAR.pop()
            file2local = util.localpath(file2)
            if file2 in cto:
                # renamed/copied: diff against the copy source
                file1 = util.localpath(cpy[file2])
            else:
                file1 = file2
            label1a, dir1a = getfile(file1, dir1a, label1a)
            if do3way:
                label1b, dir1b = getfile(file1, dir1b, label1b)
            label2, dir2 = getfile(file2local, dir2, label2)
        if do3way:
            label1a += '[local]'
            label1b += '[other]'
            label2 += '[merged]'

        # substitution variables expanded into the tool's command line
        replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b,
                       plabel1=label1a, plabel2=label1b,
                       phash1=str(ctx1a), phash2=str(ctx1b),
                       repo=hglib.fromunicode(repo.displayname),
                       clabel=label2, child=dir2, chash=str(ctx2))
        launchtool(diffcmd, args, replace, True)

        # detect if changes were made to mirrored working files
        for copy_fn, working_fn, mtime in fns_and_mtime:
            try:
                if os.lstat(copy_fn).st_mtime != mtime:
                    ui.debug('file changed while diffing. '
                             'Overwriting: %s (src: %s)\n' % (working_fn,
                                                              copy_fn))
                    util.copyfile(copy_fn, working_fn)
            except EnvironmentError:
                pass  # Ignore I/O errors or missing files

    def dodiffwrapper():
        try:
            dodiff()
        finally:
            # cleanup happens atexit
            ui.note('cleaning up temp directory\n')

    if opts.get('mainapp'):
        dodiffwrapper()
    else:
        # We are not the main application, so this must be done in a
        # background thread
        thread = threading.Thread(target=dodiffwrapper, name='visualdiff')
        thread.setDaemon(True)
        thread.start()
def rewrite(repo, old, updates, head, newbases, commitopts):
    """Return (nodeid, created) where nodeid is the identifier of the
    changeset generated by the rewrite process, and created is True if
    nodeid was actually created. If created is False, nodeid
    references a changeset existing before the rewrite call.

    Parameters (all Mercurial objects; assumed from call sites -- confirm):
      old        -- changectx being rewritten
      updates    -- changectxs folded into the new commit
      head       -- changectx whose file contents define the result
      newbases   -- parent nodes for the new commit
      commitopts -- dict of commit options (message/user/date/extra/edit)
    """
    wlock = lock = tr = None
    try:
        # wlock before lock: standard Mercurial lock ordering
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction('rewrite')
        if len(old.parents()) > 1:
            # XXX remove this unnecessary limitation.
            raise error.Abort(_('cannot amend merge changesets'))
        base = old.p1()
        # returned callable retargets bookmarks from the old nodes to newid
        updatebookmarks = bookmarksupdater(
            repo, [old.node()] + [u.node() for u in updates], tr)

        # commit a new version of the old changeset, including the update
        # collect all files which might be affected
        files = set(old.files())
        for u in updates:
            files.update(u.files())

        # Recompute copies (avoid recording a -> b -> a)
        copied = copies.pathcopies(base, head)

        # prune files which were reverted by the updates
        def samefile(f):
            # True when f's content+flags in head match base (or f is
            # absent from both), i.e. the net change is a no-op.
            if f in head.manifest():
                a = head.filectx(f)
                if f in base.manifest():
                    b = base.filectx(f)
                    return (a.data() == b.data()
                            and a.flags() == b.flags())
                else:
                    return False
            else:
                return f not in base.manifest()
        files = [f for f in files if not samefile(f)]

        # commit version of these files as defined by head
        headmf = head.manifest()

        def filectxfn(repo, ctx, path):
            # memctx callback: supply file data from head, or None
            # (treated as removal) when the path is gone.
            if path in headmf:
                fctx = head[path]
                flags = fctx.flags()
                mctx = context.memfilectx(repo, ctx, fctx.path(),
                                          fctx.data(),
                                          islink='l' in flags,
                                          isexec='x' in flags,
                                          copied=copied.get(path))
                return mctx
            return None

        message = cmdutil.logmessage(repo.ui, commitopts)
        if not message:
            message = old.description()

        user = commitopts.get('user') or old.user()
        # TODO: if no date is given, we should take the old commit date
        # when working on a single changeset, or mimic the fold behavior
        # for dates
        date = commitopts.get('date') or None
        extra = dict(commitopts.get('extra', old.extra()))
        extra['branch'] = head.branch()
        new = context.memctx(repo,
                             parents=newbases,
                             text=message,
                             files=files,
                             filectxfn=filectxfn,
                             user=user,
                             date=date,
                             extra=extra)

        if commitopts.get('edit'):
            # let the user edit the message; memctx text is mutated in place
            new._text = cmdutil.commitforceeditor(repo, new, [])
        # detect whether commitctx reused an existing node: the repo only
        # grows when a genuinely new changeset was created
        revcount = len(repo)
        newid = repo.commitctx(new)
        new = repo[newid]
        created = len(repo) != revcount
        updatebookmarks(newid)
        tr.close()
        return newid, created
    finally:
        # releases in reverse order; an unclosed transaction is aborted here
        lockmod.release(tr, lock, wlock)
def d():
    """Compute path copies from ctx1 to ctx2, discarding the result."""
    copies.pathcopies(ctx1, ctx2)