def findoutgoing(ui, repo, remote=None, force=False, opts=None):
    """Find the unique root of the outgoing changesets.

    Used by initialization code.  Aborts when nothing is outgoing, or
    when the outgoing set has more than one root (ambiguous).
    """
    if opts is None:
        opts = {}
    # Resolve the destination the same way `hg push` would.
    dest = ui.expandpath(remote or 'default-push', remote or 'default')
    dest, revs = hg.parseurl(dest, None)[:2]
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))

    revs, checkout = hg.addbranchrevs(repo, repo, revs, None)
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(r) for r in revs]

    outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
    if not outgoing.missing:
        raise error.Abort(_('no outgoing ancestors'))
    roots = list(repo.revs("roots(%ln)", outgoing.missing))
    if len(roots) > 1:
        msg = _('there are ambiguous outgoing revisions')
        hint = _('see "hg help histedit" for more detail')
        raise error.Abort(msg, hint=hint)
    return repo.lookup(roots[0])
def push(self, remote, force=False, revs=None, newbranch=False):
    """Upload the largefiles referenced by outgoing changesets, then
    delegate the actual push to the parent class.

    Note: ``repo``, ``ui``, ``node_``, ``lfutil``, ``lfcommands`` and
    ``lfilesrepo`` are names from the enclosing scope.
    """
    outgoing = discovery.findcommonoutgoing(repo, remote.peer(), force=force)
    if outgoing.missing:
        toupload = set()
        for n in self.changelog.nodesbetween(outgoing.missing, revs)[0]:
            ctx = self[n]
            files = set(ctx.files())
            realparents = [p for p in self.changelog.parents(n)
                           if p != node_.nullid]
            if len(realparents) == 2:
                # For merges ctx.files() is not exhaustive, so also diff
                # the manifest against both parent manifests.
                mc = ctx.manifest()
                pm1 = ctx.parents()[0].manifest()
                pm2 = ctx.parents()[1].manifest()
                files.update(f for f in pm1 if f not in mc)
                files.update(f for f in pm2 if f not in mc)
                files.update(f for f in mc
                             if mc[f] != pm1.get(f, None)
                             or mc[f] != pm2.get(f, None))
            # Standin files hold the hash of the largefile they stand for.
            toupload.update(ctx[f].data().strip() for f in files
                            if lfutil.isstandin(f) and f in ctx)
        lfcommands.uploadlfiles(ui, self, remote, toupload)
    return super(lfilesrepo, self).push(remote, force, revs, newbranch)
def findoutgoing(repo, remoterepo):
    """Return the nodes in *repo* missing from *remoterepo*, handling
    the several historical Mercurial discovery APIs."""
    try:
        from mercurial import discovery
    except ImportError:
        # Mercurial < 1.6: the repo object does discovery itself.
        return repo.findoutgoing(remoterepo)
    try:
        if LooseVersion(util.version()) < LooseVersion('2.1'):
            # 1.9 - 2.0: findcommonoutgoing returns (common, heads).
            common, heads = discovery.findcommonoutgoing(repo, remoterepo)
            return repo.changelog.findmissing(common=common, heads=heads)
        # 2.1+: findcommonoutgoing returns an outgoing object.
        return discovery.findcommonoutgoing(repo, remoterepo).missing
    except AttributeError:
        # Mercurial < 1.9: the discovery module exposes findoutgoing.
        return discovery.findoutgoing(repo, remoterepo)
def push(self, remote, force=False, revs=None, newbranch=False):
    """Push to *remote*, first validating destination requirements and
    uploading any largefiles referenced by outgoing changesets.

    ``repo``, ``ui``, ``node_``, ``lfutil``, ``lfcommands`` and
    ``lfilesrepo`` come from the enclosing scope.
    """
    # Refuse to push to a local destination missing a required feature.
    if remote.local():
        missing = set(self.requirements) - remote.local().supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    outgoing = discovery.findcommonoutgoing(repo, remote.peer(), force=force)
    if outgoing.missing:
        toupload = set()
        for rev in self.changelog.nodesbetween(outgoing.missing, revs)[0]:
            cctx = self[rev]
            candidates = set(cctx.files())
            nonnull = [p for p in self.changelog.parents(rev)
                       if p != node_.nullid]
            if len(nonnull) == 2:
                # Merge changeset: ctx.files() is incomplete for merges,
                # so compare the manifest with both parent manifests.
                man = cctx.manifest()
                man1 = cctx.parents()[0].manifest()
                man2 = cctx.parents()[1].manifest()
                candidates.update(f for f in man1 if f not in man)
                candidates.update(f for f in man2 if f not in man)
                candidates.update(
                    f for f in man
                    if man[f] != man1.get(f, None)
                    or man[f] != man2.get(f, None))
            # A standin's content is the hash of the largefile it tracks.
            toupload.update(cctx[f].data().strip() for f in candidates
                            if lfutil.isstandin(f) and f in cctx)
        lfcommands.uploadlfiles(ui, self, remote, toupload)
    return super(lfilesrepo, self).push(remote, force, revs, newbranch)
def getoutgoinglfiles(ui, repo, dest=None, **opts):
    """Return the sorted list of standin files touched by outgoing
    changesets, or None when the destination repo is unreachable."""
    path = ui.expandpath(dest or 'default-push', dest or 'default')
    path, branches = hg.parseurl(path, opts.get('branch'))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]

    try:
        remote = hg.peer(repo, opts, path)
    except error.RepoError:
        return None

    outgoing = discovery.findcommonoutgoing(repo, remote.peer(), force=False)
    if not outgoing.missing:
        # Nothing outgoing: hand back the (empty) missing list as-is.
        return outgoing.missing

    nodes = repo.changelog.nodesbetween(outgoing.missing, revs)[0]
    if opts.get('newest_first'):
        nodes.reverse()

    toupload = set()
    for n in nodes:
        realparents = [p for p in repo.changelog.parents(n)
                       if p != node.nullid]
        ctx = repo[n]
        files = set(ctx.files())
        if len(realparents) == 2:
            # Merge commit: ctx.files() may not list everything that
            # differs, so compare the manifest against both parents.
            mc = ctx.manifest()
            pm1 = ctx.parents()[0].manifest()
            pm2 = ctx.parents()[1].manifest()
            files.update(f for f in pm1 if f not in mc)
            files.update(f for f in pm2 if f not in mc)
            files.update(f for f in mc
                         if mc[f] != pm1.get(f, None)
                         or mc[f] != pm2.get(f, None))
        toupload.update(f for f in files
                        if lfutil.isstandin(f) and f in ctx)
    return sorted(toupload)
def findoutgoing(ui, repo, remote=None, force=False, opts=None):
    """utility function to find the first outgoing changeset

    Used by initialisation code.  Raises util.Abort when there are no
    outgoing changesets.
    """
    # Fix: the default was the mutable literal `opts={}`, which is shared
    # across calls; any mutation would leak into later invocations.
    if opts is None:
        opts = {}
    dest = ui.expandpath(remote or 'default-push', remote or 'default')
    dest, revs = hg.parseurl(dest, None)[:2]
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))

    revs, checkout = hg.addbranchrevs(repo, repo, revs, None)
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    # NOTE(review): a stale comment about hexlifying nodes for a revsingle
    # call was removed — nothing in this body parses node ids as revsets.
    outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
    if not outgoing.missing:
        raise util.Abort(_('no outgoing ancestors'))
    return outgoing.missing[0]
def findoutgoing(ui, repo, remote=None, force=False, opts=None):
    """utility function to find the first outgoing changeset

    Used by initialisation code.  Aborts when nothing is outgoing or
    when the outgoing set has more than one root.
    """
    # Fix: the default was the mutable literal `opts={}`, shared across
    # calls — replaced with the None sentinel idiom (matches the other
    # findoutgoing variant in this codebase).
    if opts is None:
        opts = {}
    dest = ui.expandpath(remote or 'default-push', remote or 'default')
    dest, revs = hg.parseurl(dest, None)[:2]
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))

    revs, checkout = hg.addbranchrevs(repo, repo, revs, None)
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
    if not outgoing.missing:
        raise util.Abort(_('no outgoing ancestors'))
    roots = list(repo.revs("roots(%ln)", outgoing.missing))
    if 1 < len(roots):
        msg = _('there are ambiguous outgoing revisions')
        hint = _('see "hg help histedit" for more detail')
        raise util.Abort(msg, hint=hint)
    return repo.lookup(roots[0])
def push(self, remote, force=False, revs=None, newbranch=False):
    """Upload largefiles needed by the outgoing changesets before
    delegating the push to the parent class.

    ``repo``, ``ui``, ``node_``, ``lfutil``, ``lfcommands`` and
    ``lfilesrepo`` are free names from the enclosing scope.
    """
    outgoing = discovery.findcommonoutgoing(repo, remote.peer(), force=force)
    if outgoing.missing:
        tosend = set()
        for cset in self.changelog.nodesbetween(outgoing.missing, revs)[0]:
            ctx = self[cset]
            changed = set(ctx.files())
            parents = [p for p in self.changelog.parents(cset)
                       if p != node_.nullid]
            if len(parents) == 2:
                # ctx.files() is not exhaustive for merges, so also
                # diff the manifest against each parent manifest.
                mf = ctx.manifest()
                mfp1 = ctx.parents()[0].manifest()
                mfp2 = ctx.parents()[1].manifest()
                for fn in mfp1:
                    if fn not in mf:
                        changed.add(fn)
                for fn in mfp2:
                    if fn not in mf:
                        changed.add(fn)
                for fn in mf:
                    if (mf[fn] != mfp1.get(fn, None)
                            or mf[fn] != mfp2.get(fn, None)):
                        changed.add(fn)
            # A standin's content is the hash of the largefile it tracks.
            tosend.update(ctx[fn].data().strip() for fn in changed
                          if lfutil.isstandin(fn) and fn in ctx)
        lfcommands.uploadlfiles(ui, self, remote, tosend)
    return super(lfilesrepo, self).push(remote, force, revs, newbranch)
def _narrow(ui, repo, remote, commoninc, oldincludes, oldexcludes,
            newincludes, newexcludes, force):
    """Narrow the repository from (oldincludes, oldexcludes) to
    (newincludes, newexcludes).

    Aborts (unless *force*) when visible local-only commits touch files
    that the new narrowspec would untrack; strips such commits, deletes
    the now-unwanted store revlogs, and records the new narrowspec.
    """
    oldmatch = narrowspec.match(repo.root, oldincludes, oldexcludes)
    newmatch = narrowspec.match(repo.root, newincludes, newexcludes)

    # This is essentially doing "hg outgoing" to find all local-only
    # commits. We will then check that the local-only commits don't
    # have any changes to files that will be untracked.
    unfi = repo.unfiltered()
    outgoing = discovery.findcommonoutgoing(unfi, remote, commoninc=commoninc)
    ui.status(_('looking for local changes to affected paths\n'))
    localnodes = []
    for n in itertools.chain(outgoing.missing, outgoing.excluded):
        # A local-only commit only blocks narrowing if it modifies a file
        # that is tracked now but would not be tracked afterwards.
        if any(oldmatch(f) and not newmatch(f) for f in unfi[n].files()):
            localnodes.append(n)
    revstostrip = unfi.revs('descendants(%ln)', localnodes)
    hiddenrevs = repoview.filterrevs(repo, 'visible')
    visibletostrip = list(repo.changelog.node(r)
                          for r in (revstostrip - hiddenrevs))
    if visibletostrip:
        ui.status(_('The following changeset(s) or their ancestors have '
                    'local changes not on the remote:\n'))
        maxnodes = 10
        if ui.verbose or len(visibletostrip) <= maxnodes:
            for n in visibletostrip:
                ui.status('%s\n' % node.short(n))
        else:
            for n in visibletostrip[:maxnodes]:
                ui.status('%s\n' % node.short(n))
            ui.status(_('...and %d more, use --verbose to list all\n') %
                      (len(visibletostrip) - maxnodes))
        if not force:
            raise error.Abort(_('local changes found'),
                              hint=_('use --force-delete-local-changes to '
                                     'ignore'))

    with ui.uninterruptable():
        if revstostrip:
            tostrip = [unfi.changelog.node(r) for r in revstostrip]
            if repo['.'].node() in tostrip:
                # stripping working copy, so move to a different commit first
                urev = max(repo.revs('(::%n) - %ln + null',
                                     repo['.'].node(), visibletostrip))
                hg.clean(repo, urev)
            overrides = {('devel', 'strip-obsmarkers'): False}
            with ui.configoverride(overrides, 'narrow'):
                repair.strip(ui, unfi, tostrip, topic='narrow')

        # Collect store files (revlogs) that the new narrowspec no
        # longer covers.
        todelete = []
        for f, f2, size in repo.store.datafiles():
            if f.startswith('data/'):
                file = f[5:-2]  # strip 'data/' prefix and '.i'/'.d' suffix
                if not newmatch(file):
                    todelete.append(f)
            elif f.startswith('meta/'):
                dir = f[5:-13]  # strip 'meta/' prefix and '/00manifest.i'
                # Keep a tree-manifest revlog only if the new matcher
                # still visits its directory (or an ancestor says 'all').
                dirs = ['.'] + sorted(util.dirs({dir})) + [dir]
                include = True
                for d in dirs:
                    visit = newmatch.visitdir(d)
                    if not visit:
                        include = False
                        break
                    if visit == 'all':
                        break
                if not include:
                    todelete.append(f)

        repo.destroying()

        with repo.transaction("narrowing"):
            for f in todelete:
                ui.status(_('deleting %s\n') % f)
                util.unlinkpath(repo.svfs.join(f))
                repo.store.markremoved(f)

            _narrowcleanupwdir(repo, oldincludes, oldexcludes, newincludes,
                               newexcludes, oldmatch, newmatch)
            repo.setnarrowpats(newincludes, newexcludes)

        repo.destroyed()
def trackedcmd(ui, repo, remotepath=None, *pats, **opts):
    """show or change the current narrowspec

    With no argument, shows the current narrowspec entries, one per line. Each
    line will be prefixed with 'I' or 'X' for included or excluded patterns,
    respectively.

    The narrowspec is comprised of expressions to match remote files and/or
    directories that should be pulled into your client.
    The narrowspec has *include* and *exclude* expressions, with excludes always
    trumping includes: that is, if a file matches an exclude expression, it will
    be excluded even if it also matches an include expression.
    Excluding files that were never included has no effect.

    Each included or excluded entry is in the format described by
    'hg help patterns'.

    The options allow you to add or remove included and excluded expressions.

    If --clear is specified, then all previous includes and excludes are DROPPED
    and replaced by the new ones specified to --addinclude and --addexclude.
    If --clear is specified without any further options, the narrowspec will be
    empty and will not match any files.

    If --auto-remove-includes is specified, then those includes that don't match
    any files modified by currently visible local commits (those not shared by
    the remote) will be added to the set of explicitly specified includes to
    remove.

    --import-rules accepts a path to a file containing rules, allowing you to
    add --addinclude, --addexclude rules in bulk. Like the other include and
    exclude switches, the changes are applied immediately.
    """
    opts = pycompat.byteskwargs(opts)
    # The tracked command only makes sense on a narrow clone.
    if repository.NARROW_REQUIREMENT not in repo.requirements:
        raise error.Abort(
            _(b'the tracked command is only supported on '
              b'repositories cloned with --narrow'))

    # Before supporting, decide whether it "hg tracked --clear" should mean
    # tracking no paths or all paths.
    if opts[b'clear']:
        raise error.Abort(_(b'the --clear option is not yet supported'))

    # import rules from a file
    newrules = opts.get(b'import_rules')
    if newrules:
        try:
            filepath = os.path.join(encoding.getcwd(), newrules)
            fdata = util.readfile(filepath)
        except IOError as inst:
            raise error.Abort(
                _(b"cannot read narrowspecs from '%s': %s")
                % (filepath, encoding.strtolocal(inst.strerror)))
        # Reuse the sparse config parser; narrowspec does not support
        # nested %include profiles.
        includepats, excludepats, profiles = sparse.parseconfig(
            ui, fdata, b'narrow')
        if profiles:
            raise error.Abort(
                _(b"including other spec files using '%include' "
                  b"is not supported in narrowspec"))
        opts[b'addinclude'].extend(includepats)
        opts[b'addexclude'].extend(excludepats)

    addedincludes = narrowspec.parsepatterns(opts[b'addinclude'])
    removedincludes = narrowspec.parsepatterns(opts[b'removeinclude'])
    addedexcludes = narrowspec.parsepatterns(opts[b'addexclude'])
    removedexcludes = narrowspec.parsepatterns(opts[b'removeexclude'])
    autoremoveincludes = opts[b'auto_remove_includes']

    update_working_copy = opts[b'update_working_copy']
    # No modifying options at all means we just print the narrowspec.
    only_show = not (addedincludes or removedincludes or addedexcludes
                     or removedexcludes or newrules or autoremoveincludes
                     or update_working_copy)

    oldincludes, oldexcludes = repo.narrowpats

    # filter the user passed additions and deletions into actual additions and
    # deletions of excludes and includes
    addedincludes -= oldincludes
    removedincludes &= oldincludes
    addedexcludes -= oldexcludes
    removedexcludes &= oldexcludes

    widening = addedincludes or removedexcludes
    narrowing = removedincludes or addedexcludes

    # Only print the current narrowspec.
    if only_show:
        ui.pager(b'tracked')
        fm = ui.formatter(b'narrow', opts)
        for i in sorted(oldincludes):
            fm.startitem()
            fm.write(b'status', b'%s ', b'I', label=b'narrow.included')
            fm.write(b'pat', b'%s\n', i, label=b'narrow.included')
        for i in sorted(oldexcludes):
            fm.startitem()
            fm.write(b'status', b'%s ', b'X', label=b'narrow.excluded')
            fm.write(b'pat', b'%s\n', i, label=b'narrow.excluded')
        fm.end()
        return 0

    if update_working_copy:
        with repo.wlock(), repo.lock(), repo.transaction(b'narrow-wc'):
            narrowspec.updateworkingcopy(repo)
            narrowspec.copytoworkingcopy(repo)
        return 0

    if not (widening or narrowing or autoremoveincludes):
        ui.status(_(b"nothing to widen or narrow\n"))
        return 0

    with repo.wlock(), repo.lock():
        cmdutil.bailifchanged(repo)

        # Find the revisions we have in common with the remote. These will
        # be used for finding local-only changes for narrowing. They will
        # also define the set of revisions to update for widening.
        remotepath = ui.expandpath(remotepath or b'default')
        url, branches = hg.parseurl(remotepath)
        ui.status(_(b'comparing with %s\n') % util.hidepassword(url))
        remote = hg.peer(repo, opts, url)

        # check narrow support before doing anything if widening needs to be
        # performed. In future we should also abort if client is ellipses and
        # server does not support ellipses
        if widening and wireprototypes.NARROWCAP not in remote.capabilities():
            raise error.Abort(_(b"server does not support narrow clones"))

        commoninc = discovery.findcommonincoming(repo, remote)

        if autoremoveincludes:
            # Collect every file touched by a local-only commit; any
            # include matching none of them is a removal candidate.
            outgoing = discovery.findcommonoutgoing(repo, remote,
                                                    commoninc=commoninc)
            ui.status(_(b'looking for unused includes to remove\n'))
            localfiles = set()
            for n in itertools.chain(outgoing.missing, outgoing.excluded):
                localfiles.update(repo[n].files())
            suggestedremovals = []
            for include in sorted(oldincludes):
                match = narrowspec.match(repo.root, [include], oldexcludes)
                if not any(match(f) for f in localfiles):
                    suggestedremovals.append(include)
            if suggestedremovals:
                for s in suggestedremovals:
                    ui.status(b'%s\n' % s)
                if (ui.promptchoice(
                        _(b'remove these unused includes (yn)?'
                          b'$$ &Yes $$ &No')) == 0):
                    removedincludes.update(suggestedremovals)
                    narrowing = True
            else:
                ui.status(_(b'found no unused includes\n'))

        if narrowing:
            newincludes = oldincludes - removedincludes
            newexcludes = oldexcludes | addedexcludes
            _narrow(
                ui, repo, remote, commoninc, oldincludes, oldexcludes,
                newincludes, newexcludes,
                opts[b'force_delete_local_changes'],
            )
            # _narrow() updated the narrowspec and _widen() below needs to
            # use the updated values as its base (otherwise removed includes
            # and addedexcludes will be lost in the resulting narrowspec)
            oldincludes = newincludes
            oldexcludes = newexcludes

        if widening:
            newincludes = oldincludes | addedincludes
            newexcludes = oldexcludes - removedexcludes
            _widen(
                ui, repo, remote, commoninc, oldincludes, oldexcludes,
                newincludes, newexcludes,
            )

    return 0
def findoutgoing(repo, other, force=False):
    """Return at most one node: the first changeset missing from *other*
    (uses the tuple-returning findcommonoutgoing API)."""
    common, heads = discovery.findcommonoutgoing(
        repo, other, [], force=force)
    missing = repo.changelog.findmissing(common, heads)
    return missing[:1]
def findoutgoing(repo, other, force=False):
    """Return a list holding the first changeset missing from *other*,
    or an empty list when nothing is outgoing."""
    missing = discovery.findcommonoutgoing(repo, other, [],
                                           force=force).missing
    if not missing:
        return []
    return [missing[0]]
def _narrow(
    ui,
    repo,
    remote,
    commoninc,
    oldincludes,
    oldexcludes,
    newincludes,
    newexcludes,
    force,
    backup,
):
    """Narrow the repository from (oldincludes, oldexcludes) to
    (newincludes, newexcludes).

    Raises StateError (unless *force*) when visible local-only commits
    touch files the new narrowspec would untrack; strips such commits
    (to a backup bundle if *backup*), deletes now-unwanted store
    revlogs, and records the new narrowspec.
    """
    oldmatch = narrowspec.match(repo.root, oldincludes, oldexcludes)
    newmatch = narrowspec.match(repo.root, newincludes, newexcludes)

    # This is essentially doing "hg outgoing" to find all local-only
    # commits. We will then check that the local-only commits don't
    # have any changes to files that will be untracked.
    unfi = repo.unfiltered()
    outgoing = discovery.findcommonoutgoing(unfi, remote, commoninc=commoninc)
    ui.status(_(b'looking for local changes to affected paths\n'))
    progress = ui.makeprogress(
        topic=_(b'changesets'),
        unit=_(b'changesets'),
        total=len(outgoing.missing) + len(outgoing.excluded),
    )
    localnodes = []
    with progress:
        for n in itertools.chain(outgoing.missing, outgoing.excluded):
            progress.increment()
            # Only commits touching files tracked now but not afterwards
            # block the narrowing.
            if any(oldmatch(f) and not newmatch(f) for f in unfi[n].files()):
                localnodes.append(n)
    revstostrip = unfi.revs(b'descendants(%ln)', localnodes)
    hiddenrevs = repoview.filterrevs(repo, b'visible')
    visibletostrip = list(
        repo.changelog.node(r) for r in (revstostrip - hiddenrevs))
    if visibletostrip:
        ui.status(
            _(b'The following changeset(s) or their ancestors have '
              b'local changes not on the remote:\n'))
        maxnodes = 10
        if ui.verbose or len(visibletostrip) <= maxnodes:
            for n in visibletostrip:
                ui.status(b'%s\n' % short(n))
        else:
            for n in visibletostrip[:maxnodes]:
                ui.status(b'%s\n' % short(n))
            ui.status(
                _(b'...and %d more, use --verbose to list all\n')
                % (len(visibletostrip) - maxnodes))
        if not force:
            raise error.StateError(
                _(b'local changes found'),
                hint=_(b'use --force-delete-local-changes to ignore'),
            )

    with ui.uninterruptible():
        if revstostrip:
            tostrip = [unfi.changelog.node(r) for r in revstostrip]
            if repo[b'.'].node() in tostrip:
                # stripping working copy, so move to a different commit first
                urev = max(
                    repo.revs(
                        b'(::%n) - %ln + null',
                        repo[b'.'].node(),
                        visibletostrip,
                    ))
                hg.clean(repo, urev)
            overrides = {(b'devel', b'strip-obsmarkers'): False}
            if backup:
                ui.status(_(b'moving unwanted changesets to backup\n'))
            else:
                ui.status(_(b'deleting unwanted changesets\n'))
            with ui.configoverride(overrides, b'narrow'):
                repair.strip(ui, unfi, tostrip, topic=b'narrow',
                             backup=backup)

        # Collect store files (revlogs) that the new narrowspec no
        # longer covers.
        todelete = []
        for t, f, f2, size in repo.store.datafiles():
            if f.startswith(b'data/'):
                file = f[5:-2]  # strip 'data/' prefix and '.i'/'.d' suffix
                if not newmatch(file):
                    todelete.append(f)
            elif f.startswith(b'meta/'):
                dir = f[5:-13]  # strip 'meta/' prefix and '/00manifest.i'
                # Keep a tree-manifest revlog only if the new matcher
                # still visits its directory (or an ancestor says 'all').
                dirs = sorted(pathutil.dirs({dir})) + [dir]
                include = True
                for d in dirs:
                    visit = newmatch.visitdir(d)
                    if not visit:
                        include = False
                        break
                    if visit == b'all':
                        break
                if not include:
                    todelete.append(f)

        repo.destroying()

        with repo.transaction(b'narrowing'):
            # Update narrowspec before removing revlogs, so repo won't be
            # corrupt in case of crash
            repo.setnarrowpats(newincludes, newexcludes)

            for f in todelete:
                ui.status(_(b'deleting %s\n') % f)
                util.unlinkpath(repo.svfs.join(f))
                repo.store.markremoved(f)

            ui.status(_(b'deleting unwanted files from working copy\n'))
            with repo.dirstate.parentchange():
                narrowspec.updateworkingcopy(repo, assumeclean=True)
                narrowspec.copytoworkingcopy(repo)

        repo.destroyed()
def findoutgoing(repo, other, force=False):
    """Return a single-element list with the first outgoing node, or an
    empty list when there is nothing outgoing."""
    outgoing = discovery.findcommonoutgoing(repo, other, [], force=force)
    return list(outgoing.missing[:1])