def extsetup():
    """Install syntax highlighting into hgweb (old-style extsetup, no ui arg)."""
    # monkeypatch in the new version
    extensions.wrapfunction(webcommands, '_filerevision', filerevision_highlight)
    extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
    # expose the generated stylesheet as a proper web command
    webcommands.highlightcss = generate_css
    webcommands.__all__.append('highlightcss')
def extsetup(ui):
    """Hook command dispatch, bookmark writes, dirstate creation and the
    share/unshare machinery (journal recording)."""
    extensions.wrapfunction(dispatch, 'runcommand', runcommand)
    extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
    # 'func' is the filecache-wrapped factory behind localrepository.dirstate
    extensions.wrapfunction(
        localrepo.localrepository.dirstate, 'func', wrapdirstate)
    extensions.wrapfunction(hg, 'postshare', wrappostshare)
    extensions.wrapfunction(hg, 'copystore', unsharejournal)
def extsetup(ui):
    """Wrap dirstate construction and merge updates; add a macOS-only
    symlink workaround."""
    wrapfilecache(localrepo.localrepository, "dirstate", wrapdirstate)
    if sys.platform == "darwin":
        # An assist for avoiding the dangling-symlink fsevents bug
        extensions.wrapfunction(os, "symlink", wrapsymlink)

    extensions.wrapfunction(merge, "update", wrapupdate)
def extsetup(ui):
    """Advertise review capabilities and register review pushkey namespaces."""
    extensions.wrapfunction(wireproto, '_capabilities', capabilities)

    # 'strip' pushes are handled server-side; listing is read-only.
    pushkey.register('strip', pushstrip, liststrip)

    # Add a pushkey namespace to obtain the list of available review
    # repositories. This is used for repository discovery.
    pushkey.register('reviewrepos', lambda *x: False, listreviewrepos)
def ancestorcache(path):
    # simple cache to speed up revlog.ancestors
    #
    # Generator-based context manager: while active, revlog.ancestor results
    # are memoized in a dbm file at *path*; on exit the wrap is removed and
    # the database closed (or nuked if it cannot be closed cleanly).
    try:
        db = anydbm.open(path, 'c')
    except anydbm.error:
        # database locked, fail gracefully
        yield
    else:
        def revlogancestor(orig, self, a, b):
            # cache key is the two node ids concatenated (order-sensitive,
            # which is fine: callers pass them in a consistent order)
            key = a + b
            try:
                return db[key]
            except KeyError:
                result = orig(self, a, b)
                db[key] = result
                return result

        extensions.wrapfunction(revlog.revlog, 'ancestor', revlogancestor)
        try:
            yield
        finally:
            extensions.unwrapfunction(revlog.revlog, 'ancestor',
                                      revlogancestor)
            try:
                db.close()
            except Exception:
                # database corruption, we just nuke the database
                util.tryunlink(path)
def extsetup(ui):
    """Serve obsolescence markers over the wire protocol (simple4server)."""
    localrepo.moderncaps.add('_evoext_b2x_obsmarkers_0')
    gboptsmap['evo_obscommon'] = 'nodes'
    # Older Mercurial lacks relevantmarkers: substitute our obsstore subclass.
    if not util.safehasattr(obsolete.obsstore, 'relevantmarkers'):
        obsolete.obsstore = pruneobsstore
        obsolete.obsstore.relevantmarkers = relevantmarkers
    # permission levels for the new hgweb wire commands
    hgweb_mod.perms['evoext_pushobsmarkers_0'] = 'push'
    hgweb_mod.perms['evoext_pullobsmarkers_0'] = 'pull'
    hgweb_mod.perms['evoext_obshash'] = 'pull'
    wireproto.commands['evoext_pushobsmarkers_0'] = (srv_pushobsmarkers, '')
    wireproto.commands['evoext_pullobsmarkers_0'] = (srv_pullobsmarkers, '*')
    # wrap module content
    origfunc = exchange.getbundle2partsmapping['obsmarkers']

    def newfunc(*args, **kwargs):
        return _getbundleobsmarkerpart(origfunc, *args, **kwargs)
    exchange.getbundle2partsmapping['obsmarkers'] = newfunc
    extensions.wrapfunction(wireproto, 'capabilities', capabilities)
    # wrap command content
    oldcap, args = wireproto.commands['capabilities']

    def newcap(repo, proto):
        return capabilities(oldcap, repo, proto)
    wireproto.commands['capabilities'] = (newcap, args)
    wireproto.commands['evoext_obshash'] = (srv_obshash, 'nodes')
    wireproto.commands['evoext_obshash1'] = (srv_obshash1, 'nodes')
    # specific simple4server content
    extensions.wrapfunction(pushkey, '_nslist', _nslist)
    pushkey._namespaces['namespaces'] = (lambda *x: False, pushkey._nslist)
def setupserver(ui, repo):
    """Sets up a normal Mercurial repo so it can serve files to shallow
    repos.
    """
    onetimesetup(ui)

    # don't send files to shallow clients during pulls
    def generatefiles(orig, self, changedfiles, linknodes, commonrevs,
                      source):
        caps = self._bundlecaps or []
        if shallowrepo.requirement in caps:
            # only send files that don't match the specified patterns
            includepattern = None
            excludepattern = None
            for cap in (self._bundlecaps or []):
                # patterns are transmitted NUL-separated inside the capability
                if cap.startswith("includepattern="):
                    includepattern = cap[len("includepattern="):].split('\0')
                elif cap.startswith("excludepattern="):
                    excludepattern = cap[len("excludepattern="):].split('\0')

            m = match.always(repo.root, '')
            if includepattern or excludepattern:
                m = match.match(repo.root, '', None,
                                includepattern, excludepattern)

            changedfiles = list([f for f in changedfiles if not m(f)])
        return orig(self, changedfiles, linknodes, commonrevs, source)

    wrapfunction(changegroup.cg1packer, 'generatefiles', generatefiles)

    # add incoming hook to continuously generate file blobs
    ui.setconfig("hooks", "changegroup.remotefilelog", incominghook)
def extsetup(ui):
    """Register pushlog capability, revsets, template keywords and hgweb
    entries."""
    extensions.wrapfunction(wireproto, '_capabilities', capabilities)
    extensions.wrapfunction(exchange, '_pullobsolete', exchangepullpushlog)

    # Only for <3.3 support.
    if not hasattr(transaction.transaction, 'addpostclose'):
        extensions.wrapfunction(transaction, '_playback', playback)

    revset.symbols['pushhead'] = revset_pushhead
    revset.symbols['pushdate'] = revset_pushdate
    revset.symbols['pushuser'] = revset_pushuser

    keywords = {
        'pushid': template_pushid,
        'pushuser': template_pushuser,
        'pushdate': template_pushdate,
        'pushbasenode': template_pushbasenode,
        'pushheadnode': template_pushheadnode,
    }
    templatekw.keywords.update(keywords)

    # dockeywords was removed in Mercurial 3.6.
    if hasattr(templatekw, 'dockeywords'):
        templatekw.dockeywords.update(keywords)

    extensions.wrapfunction(webutil, 'changesetentry', changesetentry)
    extensions.wrapfunction(webutil, 'changelistentry', changelistentry)
def extsetup(ui):
    """Register the cinnabarclone wire-protocol command across hg versions.

    Handles the wireproto -> wireprotov1server module split and the several
    historical signatures of ``wireprotocommand``.
    """
    try:
        from mercurial import wireproto
    except ImportError:
        # FIX: was a bare ``except:`` which would also swallow SystemExit /
        # KeyboardInterrupt; only an ImportError is expected here (the
        # module was renamed in modern Mercurial).
        from mercurial import wireprotov1server as wireproto
    from mercurial import extensions
    try:
        extensions.wrapfunction(wireproto, '_capabilities', _capabilities)
    except AttributeError:
        # very old hg: no _capabilities hook; wrap the command instead
        extensions.wrapcommand(
            wireproto.commands, 'capabilities', capabilities)

    def wireprotocommand(name, args='', permission='push'):
        """Compatibility shim over the historical registration APIs."""
        if hasattr(wireproto, 'wireprotocommand'):
            try:
                return wireproto.wireprotocommand(name, args, permission)
            except TypeError:
                # older signature without a permission argument
                if hasattr(wireproto, 'permissions'):
                    wireproto.permissions[name] = permission
                return wireproto.wireprotocommand(name, args)

        def register(func):
            commands = wireproto.commands
            assert name not in commands
            commands[name] = (func, args)

        return register

    wireprotocommand('cinnabarclone', permission='pull')(cinnabar)
def _clonesparsecmd(orig, ui, repo, *args, **opts):
    """Wrapper for clone: apply --include/--exclude/--enable-profile sparse
    flags to the fresh repo before the working copy is checked out."""
    include_pat = opts.get('include')
    exclude_pat = opts.get('exclude')
    enableprofile_pat = opts.get('enable_profile')
    include = exclude = enableprofile = False
    if include_pat:
        pat = include_pat
        include = True
    if exclude_pat:
        pat = exclude_pat
        exclude = True
    if enableprofile_pat:
        pat = enableprofile_pat
        enableprofile = True
    # the three flags are mutually exclusive
    if sum([include, exclude, enableprofile]) > 1:
        raise error.Abort(_("too many flags specified."))
    if include or exclude or enableprofile:
        def clone_sparse(orig, self, node, overwrite, *args, **kwargs):
            # sparse clone is a special snowflake as in that case always
            # are outside of the repo's dir hierarchy, yet we always want
            # to name our includes/excludes/enables using repo-root
            # relative paths
            overrides = {
                ('sparse', 'includereporootpaths'): True,
                ('sparse', 'enablereporootpaths'): True,
            }
            with self.ui.configoverride(overrides, 'sparse'):
                _config(self.ui, self.unfiltered(), pat, {}, include=include,
                        exclude=exclude, enableprofile=enableprofile)
            return orig(self, node, overwrite, *args, **kwargs)
        extensions.wrapfunction(hg, 'updaterepo', clone_sparse)
    return orig(ui, repo, *args, **opts)
def uisetup(ui):
    """Wrap dispatch._runcommand to pipe output through a pager for
    attended commands."""
    # no pager under the debugger or when output is not a terminal
    if '--debugger' in sys.argv or not ui.formatted():
        return

    def pagecmd(orig, ui, options, cmd, cmdfunc):
        p = ui.config("pager", "pager", os.environ.get("PAGER"))
        if p:
            attend = ui.configlist('pager', 'attend', attended)
            auto = options['pager'] == 'auto'
            always = util.parsebool(options['pager'])
            cmds, _ = cmdutil.findcmd(cmd, commands.table)
            ignore = ui.configlist('pager', 'ignore')
            # check every alias of the resolved command
            for cmd in cmds:
                if (always or auto and
                        (cmd in attend or
                         (cmd not in ignore and not attend))):
                    # remember formatted-ness before stdout becomes a pipe
                    ui.setconfig('ui', 'formatted', ui.formatted())
                    ui.setconfig('ui', 'interactive', False)
                    if util.safehasattr(signal, "SIGPIPE"):
                        # die quietly when the pager quits
                        signal.signal(signal.SIGPIPE, signal.SIG_DFL)
                    _runpager(ui, p)
                    break
        return orig(ui, options, cmd, cmdfunc)

    extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
def extsetup(ui):
    """Wire sparse support into clone/log/add/dirstate/diff, and teach
    fsmonitor (or its old hgwatchman name) to use our ignore hash."""
    # defer to the fb-specific sparse extension if it is loaded
    if _fbsparseexists(ui):
        cmdtable.clear()
        return
    _setupclone(ui)
    _setuplog(ui)
    _setupadd(ui)
    _setupdirstate(ui)
    _setupdiff(ui)
    # if fsmonitor is enabled, tell it to use our hash function
    try:
        fsmonitor = extensions.find('fsmonitor')

        def _hashignore(orig, ignore):
            return _hashmatcher(ignore)

        extensions.wrapfunction(fsmonitor, '_hashignore', _hashignore)
    except KeyError:
        pass
    # do the same for hgwatchman, old name
    try:
        hgwatchman = extensions.find('hgwatchman')

        def _hashignore(orig, ignore):
            return _hashmatcher(ignore)

        extensions.wrapfunction(hgwatchman, '_hashignore', _hashignore)
    except KeyError:
        pass
def uisetup(ui):
    """Pick a color mode (ansi / win32 / none) and wrap dispatch to
    activate the color-capable ui class when appropriate."""
    if ui.plain():
        return
    mode = ui.config('color', 'mode', 'auto')
    if mode == 'auto':
        if os.name == 'nt' and 'TERM' not in os.environ:
            # looks like a cmd.exe console, use win32 API or nothing
            mode = w32effects and 'win32' or 'none'
        else:
            mode = 'ansi'
    if mode == 'win32':
        if w32effects is None:
            # only warn if color.mode is explicitly set to win32
            ui.warn(_('win32console not found, please install pywin32\n'))
            return
        _effects.update(w32effects)
    elif mode != 'ansi':
        return

    def colorcmd(orig, ui_, opts, cmd, cmdfunc):
        coloropt = opts['color']
        auto = coloropt == 'auto'
        always = util.parsebool(coloropt)
        if (always or
            (always is None and
             (auto and
              (os.environ.get('TERM') != 'dumb' and ui_.formatted())))):
            colorui._colormode = mode
            # re-parent colorui onto the concrete ui class, then swap it in
            colorui.__bases__ = (ui_.__class__,)
            ui_.__class__ = colorui
            extstyles()
            configstyles(ui_)
        return orig(ui_, opts, cmd, cmdfunc)

    extensions.wrapfunction(dispatch, '_runcommand', colorcmd)
def uisetup(ui): """Wrap context.changectx to catch FilteredRepoLookupError.""" # uisetup has side effects depending on config. chg only runs uisetup once. # Tell chg to reload if [hiddenerror] config section changes. chgserver._configsections.append('hiddenerror') # Get the error messages from the user's configuration and substitute the # hash in. msgfmt, hintfmt = _getstrings(ui) def _filterederror(orig, repo, rev): # If the number is beyond the changelog, it's a short hash that # just happened to be a number. intrev = None try: intrev = int(rev) except ValueError: pass if intrev is not None and intrev < len(repo): node = repo.unfiltered()[rev].node() shorthash = short(node) msg = msgfmt.format(shorthash) hint = hintfmt and hintfmt.format(shorthash) return error.FilteredRepoLookupError(msg, hint=hint) return orig(repo, rev) extensions.wrapfunction(context, '_filterederror', _filterederror)
def _applypatch(self, repo, patchfile, sim, force=False, **opts):
    """applies a patch the old fashioned way.

    Temporarily wraps patch.externalpatch / patch.applydiff so that a
    'reverse' option is honored, then restores the originals.
    NOTE: this is Python 2 code (``except Exception, inst``).
    """
    def epwrapper(orig, *epargs, **epopts):
        # pass -R to the external patch command when reversing
        if opts.get('reverse'):
            epargs[1].append('-R')
        return orig(*epargs, **epopts)

    def adwrapper(orig, *adargs, **adopts):
        # flip applydiff into reverse mode when requested
        if opts.get('reverse'):
            adopts['reverse'] = True
        return orig(*adargs, **adopts)

    epo = extensions.wrapfunction(patch, 'externalpatch', epwrapper)
    ado = extensions.wrapfunction(patch, 'applydiff', adwrapper)
    files, success = {}, True
    try:
        try:
            fuzz = patch.patch(self.join(patchfile), self.ui, strip = 1,
                               cwd = repo.root, files = files)
            updatedir(self.ui, repo, files, similarity = sim/100.)
        except Exception, inst:
            self.ui.note(str(inst) + '\n')
            if not self.ui.verbose:
                self.ui.warn('patch failed, unable to continue (try -v)\n')
            success = False
    finally:
        # restore the unwrapped functions no matter what happened
        patch.externalpatch = epo
        patch.applydiff = ado
    return success
def _create_server(orig, ui, app):
    """wrapper for hgweb.server.create_server to be interruptable"""
    server = orig(ui, app)
    server.accesslog = ui
    server.errorlog = ui  # TODO: ui.warn
    # _serving is the flag our wrapped serve loop polls between requests
    server._serving = False

    def serve_forever(orig):
        server._serving = True
        try:
            try:
                while server._serving:
                    server.handle_request()
            except KeyboardInterrupt:
                # raised outside try-block around process_request().
                # see SocketServer.BaseServer
                pass
        finally:
            server._serving = False
            server.server_close()

    def handle_error(orig, request, client_address):
        type, value, _traceback = sys.exc_info()
        if issubclass(type, KeyboardInterrupt):
            # stop the serve loop instead of printing a traceback
            server._serving = False
        else:
            ui.write_err('%s\n' % value)

    extensions.wrapfunction(server, 'serve_forever', serve_forever)
    extensions.wrapfunction(server, 'handle_error', handle_error)
    return server
def winuisetup(ui):
    """Route ui.write/write_err through raw Win32 console output when
    running on a Windows console with a usable code page."""
    if sys.platform != 'win32' or not win32helper.consolehascp():
        return
    win32helper.uisetup(ui)
    try:
        from mercurial import encoding
        encoding.encoding = 'utf8'
    except ImportError:
        # very old Mercurial without the encoding module
        util._encoding = "utf-8"

    def localize(h):
        # _buffers vs buffers: attribute name changed across hg versions
        if hasattr(ui, '_buffers'):
            getbuffers = lambda ui: ui._buffers
        else:
            getbuffers = lambda ui: ui.buffers

        def f(orig, ui, *args, **kwds):
            # only write raw to the console when output is not being buffered
            if not getbuffers(ui):
                win32helper.rawprint(h, ''.join(args))
            else:
                orig(ui, *args, **kwds)

        return f

    extensions.wrapfunction(_ui.ui, "write", localize(win32helper.hStdOut))
    extensions.wrapfunction(_ui.ui, "write_err",
                            localize(win32helper.hStdErr))
def _setupwrapper():
    """Wrap hgweb.server.create_server to get along with thg"""
    global _setupwrapper_done
    # idempotent: only install the wrapper on the first call
    if _setupwrapper_done:
        return
    extensions.wrapfunction(hgweb.server, 'create_server', _create_server)
    _setupwrapper_done = True
def uisetup(ui):
    """Enable diff-highlight emphasis styles; needs the color extension."""
    if ui.plain():
        return
    try:
        extensions.find('color')
    except KeyError:
        ui.warn(_("warning: 'diff-highlight' requires 'color' extension "
                  "to be enabled, but not\n"))
        return
    # swap in our colorui subclass unless it is already active
    if not isinstance(ui, colorui):
        colorui.__bases__ = (ui.__class__,)
        ui.__class__ = colorui

    def colorconfig(orig, *args, **kwargs):
        ret = orig(*args, **kwargs)
        styles = color._styles
        # default emphasis styles: inverse of the normal insert/delete colors
        if INSERT_EMPH not in styles:
            styles[INSERT_EMPH] = styles[INSERT_NORM] + ' inverse'
        if DELETE_EMPH not in styles:
            styles[DELETE_EMPH] = styles[DELETE_NORM] + ' inverse'
        return ret

    extensions.wrapfunction(color, 'configstyles', colorconfig)
def exchangepull(orig, repo, remote, *args, **kwargs):
    # Hook into the callstream/getbundle to insert bundle capabilities
    # during a pull.
    def remotecallstream(orig, command, **opts):
        if command == 'getbundle' and 'remotefilelog' in remote._capabilities():
            bundlecaps = opts.get('bundlecaps')
            if bundlecaps:
                bundlecaps = [bundlecaps]
            else:
                bundlecaps = []
            bundlecaps.append('remotefilelog')
            # forward the local include/exclude patterns, NUL-joined
            if repo.includepattern:
                bundlecaps.append("includepattern=" +
                                  '\0'.join(repo.includepattern))
            if repo.excludepattern:
                bundlecaps.append("excludepattern=" +
                                  '\0'.join(repo.excludepattern))
            opts['bundlecaps'] = ','.join(bundlecaps)
        return orig(command, **opts)

    def localgetbundle(orig, source, heads=None, common=None, bundlecaps=None,
                       **kwargs):
        if not bundlecaps:
            bundlecaps = set()
        bundlecaps.add('remotefilelog')
        return orig(source, heads=heads, common=common, bundlecaps=bundlecaps,
                    **kwargs)

    # wire peers expose _callstream; local peers expose getbundle directly
    if hasattr(remote, '_callstream'):
        wrapfunction(remote, '_callstream', remotecallstream)
    elif hasattr(remote, 'getbundle'):
        wrapfunction(remote, 'getbundle', localgetbundle)

    return orig(repo, remote, *args, **kwargs)
def uisetup__disabled(ui):
    """Disabled uisetup variant kept for reference (note the __disabled
    suffix -- this is not called by Mercurial)."""
    from mercurial import extensions, hook
    # HACK: clearing sys.excepthook suppresses default traceback printing
    # process-wide -- looks like debugging residue; confirm before reviving
    # this function.
    import sys; sys.excepthook = None
    extensions.wrapfunction(hook, 'hook', wraphook)
    global HOOKDEBUG, HOOKLOG
    HOOKDEBUG = ui.configbool('anyhook', 'debug')
    HOOKLOG = ui.configbool('anyhook', 'log')
def extsetup(ui):
    """Set up pushrebase: the push --to flag, bundle2 part ordering and
    relaxed head checking."""
    entry = wrapcommand(commands.table, 'push', _push)
    try:
        # Don't add the 'to' arg if it already exists
        extensions.find('remotenames')
    except KeyError:
        entry[1].append(('', 'to', '', _('server revision to rebase onto')))

    # our parts must be processed before 'changeset'; commonheads goes first
    partorder = exchange.b2partsgenorder
    partorder.insert(partorder.index('changeset'),
                     partorder.pop(partorder.index(rebaseparttype)))
    partorder.insert(0,
                     partorder.pop(partorder.index(commonheadsparttype)))

    wrapfunction(discovery, 'checkheads', _checkheads)
    # we want to disable the heads check because in pushrebase repos, we
    # expect the heads to change during the push and we should not abort.
    # The check heads functions are used to verify that the heads haven't
    # changed since the client did the initial discovery. Pushrebase is meant
    # to allow concurrent pushes, so the heads may have very well changed.
    # So let's not do this check.
    wrapfunction(exchange, 'check_heads', _exchangecheckheads)
    wrapfunction(exchange, '_pushb2ctxcheckheads', _skipcheckheads)

    origpushkeyhandler = bundle2.parthandlermapping['pushkey']
    newpushkeyhandler = lambda *args, **kwargs: \
        bundle2pushkey(origpushkeyhandler, *args, **kwargs)
    newpushkeyhandler.params = origpushkeyhandler.params
    bundle2.parthandlermapping['pushkey'] = newpushkeyhandler
    bundle2.parthandlermapping['b2x:pushkey'] = newpushkeyhandler

    wrapfunction(exchange, 'unbundle', unbundle)
    wrapfunction(hg, '_peerorrepo', _peerorrepo)
def uisetup(ui):
    """Patch addremove/import option tables and docs to advertise -g/--guess.

    Fixes: 'compatability' typo in user-visible help text; removed the unused
    ``idx`` loop variable and the unused ``opts`` alias assignment.
    """

    def replace_similarity(opts):
        # Rewrite the help text of the -s/--similarity option, leaving
        # every other option untouched.
        newopts = []
        for opt in opts:
            if opt[0] == 's':
                newopt = list(opt)
                newopt[3] = 'backwards compatibility; implies -g/--guess'
                opt = tuple(newopt)
            newopts.append(opt)
        return newopts

    ar = list(commands.table['addremove'])
    docre = re.compile('Use the -s option.+way can be expensive\.',
                       re.M | re.S)
    ar[0].__doc__ = re.sub(docre, """
    Use the -g option to detect renamed files. This option uses a smarter,
    more accurate algorithm than the built-in -s option.
    """.strip(), ar[0].__doc__)
    ar[1] = replace_similarity(ar[1])
    ar[1].append(('g', 'guess', False, 'guess renamed files'))
    commands.table['addremove'] = tuple(ar)

    import_ = list(commands.table['import|patch'])
    import_[0].__doc__ = re.sub(r'(--similarity),', r'\1 or -g/--guess,',
                                import_[0].__doc__)
    import_[1] = replace_similarity(import_[1])
    import_[1].append(('g', 'guess', False, 'guess renamed files'))
    # NOTE(review): read back under 'import|patch' but stored under 'import';
    # confirm this alias rewrite is intentional for the targeted hg version.
    commands.table['import'] = tuple(import_)
    extensions.wrapfunction(scmutil, 'addremove', addremove)
def uisetup(ui):
    """Wrap core commands with task awareness and add task-related flags.

    Fix: the two optional-extension hookups used bare ``except:`` clauses,
    which also swallow SystemExit/KeyboardInterrupt; narrowed to
    ``except Exception``.
    """
    extensions.wrapfunction(repair, 'strip', strip)
    extensions.wrapcommand(commands.table, 'update', tasksupdate)
    extensions.wrapcommand(commands.table, 'log', taskslog)
    extensions.wrapcommand(commands.table, 'export', tasksexport)
    entry = extensions.wrapcommand(commands.table, 'push', taskspush)
    entry[1].append(('', 'completed-tasks', None,
                     _('push all heads that have completed tasks only')))
    entry[1].append(('', 'all-tasks', None,
                     _('push all heads including those with incomplete '
                       'tasks')))
    # Optional integration with transplant, if it is loaded.
    try:
        transplant = extensions.find('transplant')
        if transplant:
            entry = extensions.wrapcommand(transplant.cmdtable, 'transplant',
                                           taskstransplant)
            entry[1].append(('t', 'task', '',
                             _('transplant all changesets in task TASK')))
    except Exception:
        pass
    # Optional integration with patchbomb, if it is loaded.
    try:
        patchbomb = extensions.find('patchbomb')
        if patchbomb:
            entry = extensions.wrapcommand(patchbomb.cmdtable, 'email',
                                           tasksemail)
            entry[1].append(('t', 'task', '',
                             _('email all changesets in task TASK')))
    except Exception:
        pass
def replaceremotefctxannotate():
    """If remotefilelog is loaded, route its annotate through our wrapper."""
    try:
        rfl = extensions.find('remotefilelog')
    except KeyError:
        # remotefilelog is not enabled; nothing to patch
        return
    extensions.wrapfunction(rfl.remotefilectx.remotefilectx, 'annotate',
                            _remotefctxannotate)
def reposetup(ui, repo):
    """Teach a local repo about svn peers, tunnel schemes and (optionally)
    native Subversion revision syntax in revsets."""
    if repo.local():
        svnrepo.generate_repo_class(ui, repo)
    # each configured tunnel becomes an svn+<tunnel>:// URL scheme
    for tunnel in ui.configlist('hgsubversion', 'tunnels'):
        hg.schemes['svn+' + tunnel] = svnrepo
    if ui.configbool('hgsubversion', 'nativerevs'):
        extensions.wrapfunction(revset, 'stringset', util.revset_stringset)
def reposetup(ui, repo):
    """Teach a local repo about svn peers, tunnel schemes and (optionally)
    native Subversion revision syntax in revsets."""
    if repo.local():
        svnrepo.generate_repo_class(ui, repo)
    # each configured tunnel becomes an svn+<tunnel>:// URL scheme
    for tunnel in ui.configlist("hgsubversion", "tunnels"):
        hg.schemes["svn+" + tunnel] = svnrepo
    if ui.configbool("hgsubversion", "nativerevs"):
        extensions.wrapfunction(revset, "stringset", util.revset_stringset)
def _smartlogloaded(loaded):
    """Extension-load callback: wrap smartlog.getdag when smartlog exists."""
    try:
        sl = extensions.find('smartlog')
    except KeyError:
        # smartlog is not enabled; nothing to do
        return
    if sl:
        extensions.wrapfunction(sl, 'getdag', _getsmartlogdag)
def extsetup(ui):
    """Register the gitnode revset backed by conduit; no-op with a warning
    when no conduit host is configured."""
    if not conduit_config(ui):
        ui.warn(_('No conduit host specified in config; disabling fbconduit\n'))
        return
    revset.symbols['gitnode'] = gitnode
    extensions.wrapfunction(revset, 'stringset', overridestringset)
    # re-point the parser method table at the (now wrapped) stringset
    revset.methods['string'] = revset.stringset
    revset.methods['symbol'] = revset.stringset
def diff(orig, ui, repo, *pats, **opts):
    """Run the wrapped diff; while --sparse is set, patch.trydiff is
    temporarily routed through our trydiff and always unwrapped after."""
    sparse_requested = bool(opts.get('sparse'))
    if not sparse_requested:
        orig(ui, repo, *pats, **opts)
        return
    extensions.wrapfunction(patch, 'trydiff', trydiff)
    try:
        orig(ui, repo, *pats, **opts)
    finally:
        extensions.unwrapfunction(patch, 'trydiff', trydiff)
def replacefctxannotate():
    """Route basefilectx.annotate through our annotate implementation."""
    extensions.wrapfunction(hgcontext.basefilectx, 'annotate', _fctxannotate)
kwargs['oldincludepats'] = include kwargs['oldexcludepats'] = exclude kwargs['includepats'] = include kwargs['excludepats'] = exclude kwargs['known'] = [ node.hex(ctx.node()) for ctx in repo.set('::%ln', pullop.common) if ctx.node() != node.nullid ] if not kwargs['known']: # Mercurial serialized an empty list as '' and deserializes it as # [''], so delete it instead to avoid handling the empty string on the # server. del kwargs['known'] extensions.wrapfunction(exchange, '_pullbundle2extraprepare', pullbundle2extraprepare) def _narrow(ui, repo, remote, commoninc, oldincludes, oldexcludes, newincludes, newexcludes, force): oldmatch = narrowspec.match(repo.root, oldincludes, oldexcludes) newmatch = narrowspec.match(repo.root, newincludes, newexcludes) # This is essentially doing "hg outgoing" to find all local-only # commits. We will then check that the local-only commits don't # have any changes to files that will be untracked. unfi = repo.unfiltered() outgoing = discovery.findcommonoutgoing(unfi, remote, commoninc=commoninc) ui.status(_('looking for local changes to affected paths\n')) localnodes = [] for n in itertools.chain(outgoing.missing, outgoing.excluded):
def extsetup(ui):
    """Wrap working-context lookup checks and commit bookkeeping."""
    extensions.wrapfunction(context.workingctx, '_checklookup', _checklookup)
    extensions.wrapfunction(context.committablectx, 'markcommitted',
                            markcommitted)
def extsetup(ui):
    """Advertise the pushlog capability and feed pushlog data into pulls
    and hgweb rendering."""
    extensions.wrapfunction(wireproto, '_capabilities', capabilities)
    extensions.wrapfunction(exchange, '_pullobsolete', exchangepullpushlog)
    extensions.wrapfunction(webutil, 'commonentry', commonentry)
def defaultdest(source):
    """Strip a trailing '.git' from git-scheme URLs before computing the
    default clone destination."""
    for scheme in _gitschemes:
        if source.startswith('%s://' % scheme) and source.endswith('.git'):
            source = source[:-4]
            break
    return hgdefaultdest(source)


hg.defaultdest = defaultdest


# defend against tracebacks if we specify -r in 'hg pull'
def safebranchrevs(orig, lrepo, repo, branches, revs):
    revs, co = orig(lrepo, repo, branches, revs)
    # drop the checkout rev if the local repo doesn't actually have it
    if hgutil.safehasattr(lrepo, 'changelog') and co not in lrepo.changelog:
        co = None
    return revs, co


if getattr(hg, 'addbranchrevs', False):
    extensions.wrapfunction(hg, 'addbranchrevs', safebranchrevs)


def extsetup(ui):
    """Register git template keyword, revsets, and the 'git' help topic."""
    templatekw.keywords.update({'gitnode': gitnodekw})
    revset.symbols.update({
        'fromgit': revset_fromgit, 'gitnode': revset_gitnode
    })
    helpdir = os.path.join(os.path.dirname(__file__), 'help')
    entry = (['git'], _("Working with Git Repositories"),
             # loading the docs lazily keeps startup cheap
             lambda: open(os.path.join(helpdir, 'git.rst')).read())
    insort(help.helptable, entry)


def reposetup(ui, repo):
    """Swap in the git-capable repository subclass for non-git repos."""
    if not isinstance(repo, gitrepo.gitrepo):
        klass = hgrepo.generate_repo_subclass(repo.__class__)
        repo.__class__ = klass
def uisetup(ui):
    """Wrap dispatch for subsequent command-server invocations."""
    # uisetup() is called after the initial dispatch(), so this only makes an
    # effect on command server
    extensions.wrapfunction(dispatchmod, '_dispatch', _dispatch)
def extsetup(ui):
    """Intercept bookmark store reads, change recording and writes."""
    extensions.wrapfunction(bookmarks.bmstore, 'getbkfile', getbkfile)
    extensions.wrapfunction(bookmarks.bmstore, 'recordchange', recordchange)
    extensions.wrapfunction(bookmarks.bmstore, 'write', write)
def extsetup(ui):
    """Hook repository commit and the amend helper."""
    extensions.wrapfunction(localrepo.localrepository, 'commit',
                            wrappedcommit)
    extensions.wrapfunction(cmdutil, 'amend', wrappedamend)
def uisetup(ui):
    """largefiles setup: wrap commands, merge/diff machinery, wire protocol
    commands and other extensions' commands.  Order matters throughout --
    several later wraps rely on earlier module state."""
    # Disable auto-status for some commands which assume that all
    # files in the result are under Mercurial's control
    entry = extensions.wrapcommand(commands.table, 'add',
                                   overrides.overrideadd)
    addopt = [('', 'large', None, _('add as largefile')),
              ('', 'normal', None, _('add as normal file')),
              ('', 'lfsize', '', _('add all files above this size '
                                   '(in megabytes) as largefiles '
                                   '(default: 10)'))]
    entry[1].extend(addopt)

    # The scmutil function is called both by the (trivial) addremove command,
    # and in the process of handling commit -A (issue3542)
    entry = extensions.wrapfunction(scmutil, 'addremove',
                                    overrides.scmutiladdremove)
    entry = extensions.wrapcommand(commands.table, 'remove',
                                   overrides.overrideremove)
    entry = extensions.wrapcommand(commands.table, 'forget',
                                   overrides.overrideforget)

    # Subrepos call status function
    entry = extensions.wrapcommand(commands.table, 'status',
                                   overrides.overridestatus)
    entry = extensions.wrapfunction(subrepo.hgsubrepo, 'status',
                                    overrides.overridestatusfn)

    entry = extensions.wrapcommand(commands.table, 'log',
                                   overrides.overridelog)
    entry = extensions.wrapcommand(commands.table, 'rollback',
                                   overrides.overriderollback)
    entry = extensions.wrapcommand(commands.table, 'verify',
                                   overrides.overrideverify)

    verifyopt = [('', 'large', None,
                  _('verify that all largefiles in current revision exists')),
                 ('', 'lfa', None,
                  _('verify largefiles in all revisions, not just current')),
                 ('', 'lfc', None,
                  _('verify local largefile contents, not just existence'))]
    entry[1].extend(verifyopt)

    entry = extensions.wrapcommand(commands.table, 'debugstate',
                                   overrides.overridedebugstate)
    debugstateopt = [('', 'large', None, _('display largefiles dirstate'))]
    entry[1].extend(debugstateopt)

    # wrap 'outgoing' only to accept the extra option; behavior is supplied
    # via the outgoinghooks below
    outgoing = lambda orgfunc, *arg, **kwargs: orgfunc(*arg, **kwargs)
    entry = extensions.wrapcommand(commands.table, 'outgoing', outgoing)
    outgoingopt = [('', 'large', None, _('display outgoing largefiles'))]
    entry[1].extend(outgoingopt)
    cmdutil.outgoinghooks.add('largefiles', overrides.outgoinghook)
    entry = extensions.wrapcommand(commands.table, 'summary',
                                   overrides.overridesummary)
    summaryopt = [('', 'large', None, _('display outgoing largefiles'))]
    entry[1].extend(summaryopt)
    cmdutil.summaryremotehooks.add('largefiles', overrides.summaryremotehook)

    entry = extensions.wrapcommand(commands.table, 'update',
                                   overrides.overrideupdate)
    entry = extensions.wrapcommand(commands.table, 'pull',
                                   overrides.overridepull)
    pullopt = [('', 'all-largefiles', None,
                _('download all pulled versions of largefiles (DEPRECATED)')),
               ('', 'lfrev', [],
                _('download largefiles for these revisions'), _('REV'))]
    entry[1].extend(pullopt)
    revset.symbols['pulled'] = overrides.pulledrevsetsymbol

    entry = extensions.wrapcommand(commands.table, 'clone',
                                   overrides.overrideclone)
    cloneopt = [('', 'all-largefiles', None,
                 _('download all versions of all largefiles'))]
    entry[1].extend(cloneopt)
    entry = extensions.wrapfunction(hg, 'clone', overrides.hgclone)

    entry = extensions.wrapcommand(commands.table, 'cat',
                                   overrides.overridecat)
    entry = extensions.wrapfunction(merge, '_checkunknownfile',
                                    overrides.overridecheckunknownfile)
    entry = extensions.wrapfunction(merge, 'calculateupdates',
                                    overrides.overridecalculateupdates)
    entry = extensions.wrapfunction(merge, 'recordupdates',
                                    overrides.mergerecordupdates)
    entry = extensions.wrapfunction(merge, 'update',
                                    overrides.mergeupdate)
    entry = extensions.wrapfunction(filemerge, 'filemerge',
                                    overrides.overridefilemerge)
    entry = extensions.wrapfunction(cmdutil, 'copy',
                                    overrides.overridecopy)

    # Summary calls dirty on the subrepos
    entry = extensions.wrapfunction(subrepo.hgsubrepo, 'dirty',
                                    overrides.overridedirty)

    # Backout calls revert so we need to override both the command and the
    # function
    entry = extensions.wrapcommand(commands.table, 'revert',
                                   overrides.overriderevert)
    entry = extensions.wrapfunction(commands, 'revert',
                                    overrides.overriderevert)

    extensions.wrapfunction(archival, 'archive', overrides.overridearchive)
    extensions.wrapfunction(subrepo.hgsubrepo, 'archive',
                            overrides.hgsubrepoarchive)
    extensions.wrapfunction(cmdutil, 'bailifchanged',
                            overrides.overridebailifchanged)

    extensions.wrapfunction(scmutil, 'marktouched',
                            overrides.scmutilmarktouched)

    # create the new wireproto commands ...
    wireproto.commands['putlfile'] = (proto.putlfile, 'sha')
    wireproto.commands['getlfile'] = (proto.getlfile, 'sha')
    wireproto.commands['statlfile'] = (proto.statlfile, 'sha')

    # ... and wrap some existing ones
    wireproto.commands['capabilities'] = (proto.capabilities, '')
    wireproto.commands['heads'] = (proto.heads, '')
    wireproto.commands['lheads'] = (wireproto.heads, '')

    # make putlfile behave the same as push and {get,stat}lfile behave
    # the same as pull w.r.t. permissions checks
    hgweb_mod.perms['putlfile'] = 'push'
    hgweb_mod.perms['getlfile'] = 'pull'
    hgweb_mod.perms['statlfile'] = 'pull'

    extensions.wrapfunction(webcommands, 'decodepath', overrides.decodepath)

    # the hello wireproto command uses wireproto.capabilities, so it won't see
    # our largefiles capability unless we replace the actual function as well.
    proto.capabilitiesorig = wireproto.capabilities
    wireproto.capabilities = proto.capabilities

    # can't do this in reposetup because it needs to have happened before
    # wirerepo.__init__ is called
    proto.ssholdcallstream = sshpeer.sshpeer._callstream
    proto.httpoldcallstream = httppeer.httppeer._callstream
    sshpeer.sshpeer._callstream = proto.sshrepocallstream
    httppeer.httppeer._callstream = proto.httprepocallstream

    # override some extensions' stuff as well
    for name, module in extensions.extensions():
        if name == 'fetch':
            extensions.wrapcommand(getattr(module, 'cmdtable'), 'fetch',
                                   overrides.overridefetch)
        if name == 'purge':
            extensions.wrapcommand(getattr(module, 'cmdtable'), 'purge',
                                   overrides.overridepurge)
        if name == 'rebase':
            extensions.wrapcommand(getattr(module, 'cmdtable'), 'rebase',
                                   overrides.overriderebase)
        if name == 'transplant':
            extensions.wrapcommand(getattr(module, 'cmdtable'), 'transplant',
                                   overrides.overridetransplant)
        if name == 'convert':
            convcmd = getattr(module, 'convcmd')
            hgsink = getattr(convcmd, 'mercurial_sink')
            extensions.wrapfunction(hgsink, 'before',
                                    overrides.mercurialsinkbefore)
            extensions.wrapfunction(hgsink, 'after',
                                    overrides.mercurialsinkafter)
def wraprepo(repo):
    """Mix shallow (remotefilelog) behaviour into *repo* by swapping in a
    subclass of its current class and attaching file-service state."""
    class shallowrepository(repo.__class__):
        @util.propertycache
        def name(self):
            return self.ui.config('remotefilelog', 'reponame', '')

        @util.propertycache
        def fallbackpath(self):
            return repo.ui.config(
                "remotefilelog", "fallbackpath",
                # fallbackrepo is the old, deprecated name
                repo.ui.config("remotefilelog", "fallbackrepo",
                               repo.ui.config("paths", "default")))

        def sparsematch(self, *revs):
            # defer to a sparse-aware base class when one is mixed in
            baseinstance = super(shallowrepository, self)
            if util.safehasattr(baseinstance, 'sparsematch'):
                return baseinstance.sparsematch(*revs)
            return None

        def file(self, f):
            if f[0] == '/':
                f = f[1:]
            # shallow-tracked files get the remote filelog implementation
            if self.shallowmatch(f):
                return remotefilelog.remotefilelog(self.sopener, f, self)
            else:
                return super(shallowrepository, self).file(f)

        def filectx(self, path, changeid=None, fileid=None):
            if self.shallowmatch(path):
                return remotefilectx.remotefilectx(self, path, changeid,
                                                   fileid)
            else:
                return super(shallowrepository, self).filectx(path, changeid,
                                                              fileid)

        @localrepo.unfilteredmethod
        def commitctx(self, ctx, error=False):
            """Add a new revision to current repository.
            Revision information is passed via the context argument.
            """
            # prefetch files that will likely be compared
            m1 = ctx.p1().manifest()
            files = []
            for f in ctx.modified() + ctx.added():
                fparent1 = m1.get(f, nullid)
                if fparent1 != nullid:
                    files.append((f, hex(fparent1)))
            self.fileservice.prefetch(files)
            return super(shallowrepository, self).commitctx(ctx,
                                                            error=error)

        def prefetch(self, revs, base=None, pats=None, opts=None):
            """Prefetches all the necessary file revisions for the given
            revs
            """
            fallbackpath = self.fallbackpath
            if fallbackpath:
                # If we know a rev is on the server, we should fetch the
                # server version of those files, since our local file
                # versions might become obsolete if the local commits are
                # stripped.
                localrevs = repo.revs('outgoing(%s)', fallbackpath)
                if base is not None and base != nullrev:
                    serverbase = list(
                        repo.revs('first(reverse(::%s) - %ld)',
                                  base, localrevs))
                    if serverbase:
                        base = serverbase[0]
            else:
                localrevs = repo

            mf = repo.manifest
            if base is not None:
                mfdict = mf.read(repo[base].manifestnode())
                skip = set(mfdict.iteritems())
            else:
                skip = set()

            # Copy the skip set to start large and avoid constant resizing,
            # and since it's likely to be very similar to the prefetch set.
            files = skip.copy()
            serverfiles = skip.copy()
            visited = set()
            visited.add(nullrev)
            for rev in sorted(revs):
                ctx = repo[rev]
                if pats:
                    m = scmutil.match(ctx, pats, opts)
                sparsematch = repo.sparsematch(rev)

                mfnode = ctx.manifestnode()
                mfrev = mf.rev(mfnode)

                # Decompressing manifests is expensive.
                # When possible, only read the deltas.
                p1, p2 = mf.parentrevs(mfrev)
                if p1 in visited and p2 in visited:
                    mfdict = mf.readfast(mfnode)
                else:
                    mfdict = mf.read(mfnode)

                diff = mfdict.iteritems()
                if pats:
                    diff = (pf for pf in diff if m(pf[0]))
                if sparsematch:
                    diff = (pf for pf in diff if sparsematch(pf[0]))
                if rev not in localrevs:
                    serverfiles.update(diff)
                else:
                    files.update(diff)

                visited.add(mfrev)

            files.difference_update(skip)
            serverfiles.difference_update(skip)

            # Fetch files known to be on the server
            if serverfiles:
                results = [(path, hex(fnode))
                           for (path, fnode) in serverfiles]
                repo.fileservice.prefetch(results, force=True)

            # Fetch files that may or may not be on the server
            if files:
                results = [(path, hex(fnode)) for (path, fnode) in files]
                repo.fileservice.prefetch(results)

    # Wrap dirstate.status here so we can prefetch all file nodes in
    # the lookup set before localrepo.status uses them.
    def status(orig, match, subrepos, ignored, clean, unknown):
        lookup, status = orig(match, subrepos, ignored, clean, unknown)
        if lookup:
            files = []
            parents = repo.parents()
            for fname in lookup:
                for ctx in parents:
                    if fname in ctx:
                        fnode = ctx.filenode(fname)
                        files.append((fname, hex(fnode)))
            repo.fileservice.prefetch(files)
        return lookup, status

    wrapfunction(repo.dirstate, 'status', status)

    repo.__class__ = shallowrepository
    repo.shallowmatch = match.always(repo.root, '')
    repo.fileservice = fileserverclient.fileserverclient(repo)

    repo.includepattern = repo.ui.configlist("remotefilelog",
                                             "includepattern", None)
    repo.excludepattern = repo.ui.configlist("remotefilelog",
                                             "excludepattern", None)
    if repo.includepattern or repo.excludepattern:
        repo.shallowmatch = match.match(repo.root, '', None,
                                        repo.includepattern,
                                        repo.excludepattern)

    # ensure the local blob store directory exists
    localpath = os.path.join(repo.sopener.vfs.base, 'data')
    if not os.path.exists(localpath):
        os.makedirs(localpath)
def extsetup(ui):
    """Extension setup: advertise our capabilities by hooking the
    wire-protocol capability computation."""
    wrap = extensions.wrapfunction
    wrap(wireproto, '_capabilities', capabilities)
def extsetup(ui):
    """Extension setup: route every internal file merge through
    failfilemerge."""
    wrap = extensions.wrapfunction
    wrap(filemerge, '_filemerge', failfilemerge)
def uisetup(ui):
    """Hook strip, and — only when bookmark tracking is enabled — the
    update command."""
    extensions.wrapfunction(repair, "strip", strip)
    if not ui.configbool('bookmarks', 'track.current'):
        return
    extensions.wrapcommand(commands.table, 'update', updatecurbookmark)
def extsetup(ui):
    """Extension setup: hook remote-bookmark synchronisation."""
    wrap = extensions.wrapfunction
    wrap(bookmod, b'updatefromremote', bookmarks_updatefromremote)
def uisetup(ui):
    """Monkeypatches dispatch._parse to retrieve user command.
    Overrides file method to return kwfilelog instead of filelog
    if file matches user configuration.
    Wraps commit to overwrite configured files with updated
    keyword substitutions.
    Monkeypatches patch and webcommands."""

    def kwdispatch_parse(orig, ui, args):
        '''Monkeypatch dispatch._parse to obtain running hg command.'''
        parsed = orig(ui, args)
        # parsed is (cmd, func, args, options, cmdoptions); remember cmd
        kwtools[b'hgcmd'] = parsed[0]
        return parsed

    extensions.wrapfunction(dispatch, b'_parse', kwdispatch_parse)

    # Install the remaining wrappers via a single dispatch table.
    for target, attr, wrapper in (
        (context.filectx, b'cmp', kwfilectx_cmp),
        (patch.patchfile, b'__init__', kwpatchfile_init),
        (patch, b'diff', kwdiff),
        (cmdutil, b'amend', kw_amend),
        (cmdutil, b'copy', kw_copy),
        (cmdutil, b'dorecord', kw_dorecord),
    ):
        extensions.wrapfunction(target, attr, wrapper)
    for webcmd in nokwwebcommands.split():
        extensions.wrapfunction(webcommands, webcmd, kwweb_skip)
def extsetup(ui):
    """Extension setup: hook capability advertisement and pull-time
    obsmarker exchange, then swap in our ssh peer class."""
    for target, attr, wrapper in (
        (wireprotov1server, '_capabilities', _capabilities),
        (exchange, '_pullobsolete', exchange_pull_owner),
    ):
        extensions.wrapfunction(target, attr, wrapper)
    sshpeer.sshv1peer = sshv1peer
def setup(repo):
    """Add narrow spec dirstate ignore, block changes outside narrow spec."""

    def walk(orig, self, match, subrepos, unknown, ignored, full=True,
             narrowonly=True):
        # Restrict dirstate walks to the narrow spec unless the caller
        # explicitly opts out via narrowonly=False.
        if narrowonly:
            # hack to not exclude explicitly-specified paths so that they can
            # be warned later on e.g. dirstate.add()
            em = matchmod.exact(match._root, match._cwd, match.files())
            nm = matchmod.unionmatcher([repo.narrowmatch(), em])
            match = matchmod.intersectmatchers(match, nm)
        return orig(self, match, subrepos, unknown, ignored, full)

    extensions.wrapfunction(dirstate.dirstate, 'walk', walk)

    # Prevent adding files that are outside the sparse checkout
    editfuncs = ['normal', 'add', 'normallookup', 'copy', 'remove', 'merge']
    for func in editfuncs:
        # NOTE: one shared wrapper works for every entry; it only consults
        # 'orig' and the repo, never the wrapped name itself.
        def _wrapper(orig, self, *args):
            dirstate = repo.dirstate
            narrowmatch = repo.narrowmatch()
            for f in args:
                if f is not None and not narrowmatch(f) and f not in dirstate:
                    raise error.Abort(
                        _("cannot track '%s' - it is outside " +
                          "the narrow clone") % f)
            return orig(self, *args)
        extensions.wrapfunction(dirstate.dirstate, func, _wrapper)

    def filterrebuild(orig, self, parent, allfiles, changedfiles=None):
        if changedfiles is None:
            # Rebuilding entire dirstate, let's filter allfiles to match the
            # narrowspec.
            allfiles = [f for f in allfiles if repo.narrowmatch()(f)]
        orig(self, parent, allfiles, changedfiles)

    extensions.wrapfunction(dirstate.dirstate, 'rebuild', filterrebuild)

    def _narrowbackupname(backupname):
        # Derive the narrowspec backup name from the dirstate backup name.
        assert 'dirstate' in backupname
        return backupname.replace('dirstate', narrowspec.FILENAME)

    def restorebackup(orig, self, tr, backupname):
        # Restore the narrowspec alongside the dirstate.
        self._opener.rename(_narrowbackupname(backupname),
                            narrowspec.FILENAME, checkambig=True)
        orig(self, tr, backupname)

    extensions.wrapfunction(dirstate.dirstate, 'restorebackup',
                            restorebackup)

    def savebackup(orig, self, tr, backupname):
        orig(self, tr, backupname)

        # Also snapshot the narrowspec, replacing any stale backup copy.
        narrowbackupname = _narrowbackupname(backupname)
        self._opener.tryunlink(narrowbackupname)
        hgutil.copyfile(self._opener.join(narrowspec.FILENAME),
                        self._opener.join(narrowbackupname), hardlink=True)

    extensions.wrapfunction(dirstate.dirstate, 'savebackup', savebackup)

    def clearbackup(orig, self, tr, backupname):
        orig(self, tr, backupname)
        self._opener.unlink(_narrowbackupname(backupname))

    extensions.wrapfunction(dirstate.dirstate, 'clearbackup', clearbackup)
    # NOTE(review): this span is the tail of a 'defaultdest' wrapper whose
    # header lies outside this excerpt.
    return hgdefaultdest(source)
hg.defaultdest = defaultdest

# defend against tracebacks if we specify -r in 'hg pull'
def safebranchrevs(orig, lrepo, repo, branches, revs):
    revs, co = orig(lrepo, repo, branches, revs)
    # Drop the checkout target if it is not a known changelog node.
    if getattr(lrepo, 'changelog', False) and co not in lrepo.changelog:
        co = None
    return revs, co
if getattr(hg, 'addbranchrevs', False):
    extensions.wrapfunction(hg, 'addbranchrevs', safebranchrevs)

def reposetup(ui, repo):
    # Only wrap local (non-git) repositories with the git-aware subclass.
    if not isinstance(repo, gitrepo.gitrepo):
        klass = hgrepo.generate_repo_subclass(repo.__class__)
        repo.__class__ = klass

def gimport(ui, repo, remote_name=None):
    git = GitHandler(repo, ui)
    git.import_commits(remote_name)

def gexport(ui, repo):
    # NOTE(review): body continues past this excerpt; only the handler
    # construction is visible here.
    git = GitHandler(repo, ui)
def serveruisetup(ui):
    """Server-side setup: register wire commands, then hook capability
    advertisement."""
    _registerwireprotocommand()
    wrap = extensions.wrapfunction
    wrap(wireprotov1server, b'_capabilities', _capabilities)
def extsetup(ui):
    """Extension setup: hook bookmark printing."""
    wrap = extensions.wrapfunction
    wrap(bookmarks, b'_printbookmarks', wrapprintbookmarks)
def configitems(orig, self, section, untrusted=False):
    # Append discovered zeroconf repos to the [paths] section.
    repos = orig(self, section, untrusted)
    if section == "paths":
        repos += getzcpaths()
    return repos

def defaultdest(orig, source):
    # Map a zeroconf URL back to its advertised name for the default
    # clone destination.
    for name, path in getzcpaths():
        if path == source:
            return name.encode(encoding.encoding)
    return orig(source)

def cleanupafterdispatch(orig, ui, options, cmd, cmdfunc):
    try:
        return orig(ui, options, cmd, cmdfunc)
    finally:
        # we need to call close() on the server to notify() the various
        # threading Conditions and allow the background threads to exit
        global server
        if server:
            server.close()

# Module-level hook installation.
# NOTE(review): 'config' is defined elsewhere in this module, outside this
# excerpt.
extensions.wrapfunction(dispatch, '_runcommand', cleanupafterdispatch)
extensions.wrapfunction(ui.ui, 'config', config)
extensions.wrapfunction(ui.ui, 'configitems', configitems)
extensions.wrapfunction(hg, 'defaultdest', defaultdest)
hgweb_mod.hgweb = hgwebzc
hgwebdir_mod.hgwebdir = hgwebdirzc
def reposetup(ui, repo):
    '''Sets up repo as kwrepo for keyword substitution.
    Overrides file method to return kwfilelog instead of filelog
    if file matches user configuration.
    Wraps commit to overwrite configured files with updated
    keyword substitutions.
    Monkeypatches patch and webcommands.'''

    # Bail out for non-local repos, excluded commands, nested .hg paths
    # and bundle repos; AttributeError means repo lacks one of the probed
    # attributes, in which case we proceed.
    try:
        if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
            or '.hg' in util.splitpath(repo.root)
            or repo._url.startswith('bundle:')):
            return
    except AttributeError:
        pass

    # Build include/exclude pattern lists from [keyword] configuration;
    # no include patterns means keyword expansion is disabled entirely.
    inc, exc = [], ['.hg*']
    for pat, opt in ui.configitems('keyword'):
        if opt != 'ignore':
            inc.append(pat)
        else:
            exc.append(pat)
    if not inc:
        return

    kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)

    class kwrepo(repo.__class__):
        def file(self, f):
            if f[0] == '/':
                f = f[1:]
            return kwfilelog(self.sopener, kwt, f)

        def wread(self, filename):
            data = super(kwrepo, self).wread(filename)
            return kwt.wread(filename, data)

        def commit(self, *args, **opts):
            # use custom commitctx for user commands
            # other extensions can still wrap repo.commitctx directly
            self.commitctx = self.kwcommitctx
            try:
                return super(kwrepo, self).commit(*args, **opts)
            finally:
                del self.commitctx

        def kwcommitctx(self, ctx, error=False):
            n = super(kwrepo, self).commitctx(ctx, error)
            # no lock needed, only called from repo.commit() which already locks
            if not kwt.postcommit:
                restrict = kwt.restrict
                kwt.restrict = True
                kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
                              False, True)
                kwt.restrict = restrict
            return n

        def rollback(self, dryrun=False, force=False):
            wlock = self.wlock()
            try:
                if not dryrun:
                    changed = self['.'].files()
                ret = super(kwrepo, self).rollback(dryrun, force)
                if not dryrun:
                    # Re-expand keywords in files touched by the rolled-back
                    # transaction.
                    ctx = self['.']
                    modified, added = _preselect(self[None].status(), changed)
                    kwt.overwrite(ctx, modified, True, True)
                    kwt.overwrite(ctx, added, True, False)
                return ret
            finally:
                wlock.release()

    # monkeypatches
    def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
        '''Monkeypatch/wrap patch.patchfile.__init__ to avoid rejects or
        conflicts due to expanded keywords in working dir.'''
        orig(self, ui, gp, backend, store, eolmode)
        # shrink keywords read from working dir
        self.lines = kwt.shrinklines(self.fname, self.lines)

    def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
                opts=None, prefix=''):
        '''Monkeypatch patch.diff to avoid expansion.'''
        kwt.restrict = True
        return orig(repo, node1, node2, match, changes, opts, prefix)

    def kwweb_skip(orig, web, req, tmpl):
        '''Wraps webcommands.x turning off keyword expansion.'''
        kwt.match = util.never
        return orig(web, req, tmpl)

    def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
        '''Wraps cmdutil.amend expanding keywords after amend.'''
        wlock = repo.wlock()
        try:
            kwt.postcommit = True
            newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
            if newid != old.node():
                ctx = repo[newid]
                kwt.restrict = True
                kwt.overwrite(ctx, ctx.files(), False, True)
                kwt.restrict = False
            return newid
        finally:
            wlock.release()

    def kw_copy(orig, ui, repo, pats, opts, rename=False):
        '''Wraps cmdutil.copy so that copy/rename destinations do not
        contain expanded keywords.
        Note that the source of a regular file destination may also be a
        symlink:
        hg cp sym x                -> x is symlink
        cp sym x; hg cp -A sym x   -> x is file (maybe expanded keywords)
        For the latter we have to follow the symlink to find out whether its
        target is configured for expansion and we therefore must unexpand the
        keywords in the destination.'''
        wlock = repo.wlock()
        try:
            orig(ui, repo, pats, opts, rename)
            if opts.get('dry_run'):
                return
            wctx = repo[None]
            cwd = repo.getcwd()

            def haskwsource(dest):
                '''Returns true if dest is a regular file and configured
                for expansion or a symlink which points to a file configured
                for expansion.
                '''
                source = repo.dirstate.copied(dest)
                if 'l' in wctx.flags(source):
                    source = pathutil.canonpath(repo.root, cwd,
                                                os.path.realpath(source))
                return kwt.match(source)

            candidates = [f for f in repo.dirstate.copies()
                          if 'l' not in wctx.flags(f) and haskwsource(f)]
            kwt.overwrite(wctx, candidates, False, False)
        finally:
            wlock.release()

    def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
        '''Wraps record.dorecord expanding keywords after recording.'''
        wlock = repo.wlock()
        try:
            # record returns 0 even when nothing has changed
            # therefore compare nodes before and after
            kwt.postcommit = True
            ctx = repo['.']
            wstatus = repo[None].status()
            ret = orig(ui, repo, commitfunc, *pats, **opts)
            recctx = repo['.']
            if ctx != recctx:
                modified, added = _preselect(wstatus, recctx.files())
                kwt.restrict = False
                kwt.overwrite(recctx, modified, False, True)
                kwt.overwrite(recctx, added, False, True, True)
                kwt.restrict = True
            return ret
        finally:
            wlock.release()

    def kwfilectx_cmp(orig, self, fctx):
        # keyword affects data size, comparing wdir and filelog size does
        # not make sense
        if (fctx._filerev is None and
            (self._repo._encodefilterpats or
             kwt.match(fctx.path()) and 'l' not in fctx.flags() or
             self.size() - 4 == fctx.size()) or
            self.size() == fctx.size()):
            return self._filelog.cmp(self._filenode, fctx.data())
        return True

    # Install the wrappers defined above.
    extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
    extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
    extensions.wrapfunction(patch, 'diff', kw_diff)
    extensions.wrapfunction(cmdutil, 'amend', kw_amend)
    extensions.wrapfunction(cmdutil, 'copy', kw_copy)
    for c in 'annotate changeset rev filediff diff'.split():
        extensions.wrapfunction(webcommands, c, kwweb_skip)
    for name in recordextensions.split():
        try:
            record = extensions.find(name)
            extensions.wrapfunction(record, 'dorecord', kw_dorecord)
        except KeyError:
            pass

    repo.__class__ = kwrepo
def afterloaded(loaded):
    """Deferred-load callback: hook command dispatch for paging."""
    # 'loaded' is part of the callback signature; the hook is installed
    # unconditionally.
    wrap = extensions.wrapfunction
    wrap(dispatch, '_runcommand', pagecmd)
def replacehgwebannotate():
    """Swap hgweb's annotate helper for our wrapped version."""
    wrap = extensions.wrapfunction
    wrap(hgweb.webutil, 'annotate', _hgwebannotate)
def extsetup(ui):
    """Extension setup: hook the bookmark store and the clone command."""
    for target, attr, wrapper in (
        (bookmarks, b'_getbkfile', getbkfile),
        (bookmarks.bmstore, b'_recordchange', recordchange),
        (bookmarks.bmstore, b'_writerepo', writerepo),
    ):
        extensions.wrapfunction(target, attr, wrapper)
    extensions.wrapcommand(commands.table, b'clone', clone)
return server = Zeroconf.Zeroconf(ip) l = listener() Zeroconf.ServiceBrowser(server, "_hg._tcp.local.", l) time.sleep(1) server.close() for v in l.found.values(): n = v.name[:v.name.index('.')] n.replace(" ", "-") u = "http://%s:%s%s" % (socket.inet_ntoa(v.address), v.port, v.properties.get("path", "/")) yield "zc-" + n, u def config(orig, self, section, key, default=None, untrusted=False): if section == "paths" and key.startswith("zc-"): for n, p in getzcpaths(): if n == key: return p return orig(self, section, key, default, untrusted) def configitems(orig, self, section, untrusted=False): r = orig(self, section, untrusted) if section == "paths": r += getzcpaths() return r extensions.wrapfunction(ui.ui, 'config', config) extensions.wrapfunction(ui.ui, 'configitems', configitems) hgweb_mod.hgweb = hgwebzc hgwebdir_mod.hgwebdir = hgwebdirzc
def onetimesetup(ui):
    """Configures the wireprotocol for both clients and servers.
    """
    # Guard so repeated calls (e.g. multiple reposetup invocations) only
    # install the hooks once.
    global onetime
    if onetime:
        return
    onetime = True

    # support file content requests
    wireprotov1server.wireprotocommand(b'x_rfl_getflogheads', b'path',
                                       permission=b'pull')(getflogheads)
    wireprotov1server.wireprotocommand(b'x_rfl_getfiles', b'',
                                       permission=b'pull')(getfiles)
    wireprotov1server.wireprotocommand(b'x_rfl_getfile', b'file node',
                                       permission=b'pull')(getfile)

    # Mutable per-request state shared between stream_out_shallow and the
    # _walkstreamfiles wrapper below.
    class streamstate(object):
        match = None
        shallowremote = False
        noflatmf = False
    state = streamstate()

    def stream_out_shallow(repo, proto, other):
        includepattern = None
        excludepattern = None
        raw = other.get(b'includepattern')
        if raw:
            includepattern = raw.split(b'\0')
        raw = other.get(b'excludepattern')
        if raw:
            excludepattern = raw.split(b'\0')

        oldshallow = state.shallowremote
        oldmatch = state.match
        oldnoflatmf = state.noflatmf
        try:
            state.shallowremote = True
            state.match = match.always()
            state.noflatmf = other.get(b'noflatmanifest') == b'True'
            if includepattern or excludepattern:
                state.match = match.match(repo.root, b'', None,
                                          includepattern, excludepattern)
            streamres = wireprotov1server.stream(repo, proto)

            # Force the first value to execute, so the file list is computed
            # within the try/finally scope
            first = next(streamres.gen)
            second = next(streamres.gen)
            def gen():
                yield first
                yield second
                for value in streamres.gen:
                    yield value
            return wireprototypes.streamres(gen())
        finally:
            state.shallowremote = oldshallow
            state.match = oldmatch
            state.noflatmf = oldnoflatmf

    wireprotov1server.commands[b'stream_out_shallow'] = (
        stream_out_shallow,
        b'*',
    )

    # don't clone filelogs to shallow clients
    def _walkstreamfiles(orig, repo, matcher=None):
        if state.shallowremote:
            # if we are shallow ourselves, stream our local commits
            if shallowutil.isenabled(repo):
                striplen = len(repo.store.path) + 1
                readdir = repo.store.rawvfs.readdir
                visit = [os.path.join(repo.store.path, b'data')]
                while visit:
                    p = visit.pop()
                    for f, kind, st in readdir(p, stat=True):
                        fp = p + b'/' + f
                        if kind == stat.S_IFREG:
                            if not fp.endswith(b'.i') and not fp.endswith(
                                    b'.d'):
                                n = util.pconvert(fp[striplen:])
                                yield (store.decodedir(n), n, st.st_size)
                        if kind == stat.S_IFDIR:
                            visit.append(fp)

            if b'treemanifest' in repo.requirements:
                for (u, e, s) in repo.store.datafiles():
                    if u.startswith(b'meta/') and (u.endswith(b'.i') or
                                                   u.endswith(b'.d')):
                        yield (u, e, s)

            # Return .d and .i files that do not match the shallow pattern
            match = state.match
            if match and not match.always():
                for (u, e, s) in repo.store.datafiles():
                    f = u[5:-2]  # trim data/...  and .i/.d
                    if not state.match(f):
                        yield (u, e, s)

            for x in repo.store.topfiles():
                # Optionally omit the flat manifest revlog for clients that
                # asked for no flat manifests.
                if state.noflatmf and x[0][:11] == b'00manifest.':
                    continue
                yield x

        elif shallowutil.isenabled(repo):
            # don't allow cloning from a shallow repo to a full repo
            # since it would require fetching every version of every
            # file in order to create the revlogs.
            raise error.Abort(
                _(b"Cannot clone from a shallow repo to a full repo."))
        else:
            for x in orig(repo, matcher):
                yield x

    extensions.wrapfunction(streamclone, b'_walkstreamfiles',
                            _walkstreamfiles)

    # expose remotefilelog capabilities
    def _capabilities(orig, repo, proto):
        caps = orig(repo, proto)
        if shallowutil.isenabled(repo) or ui.configbool(
                b'remotefilelog', b'server'):
            if isinstance(proto, _sshv1server):
                # legacy getfiles method which only works over ssh
                caps.append(constants.NETWORK_CAP_LEGACY_SSH_GETFILES)
            caps.append(b'x_rfl_getflogheads')
            caps.append(b'x_rfl_getfile')
        return caps
    extensions.wrapfunction(wireprotov1server, b'_capabilities',
                            _capabilities)

    def _adjustlinkrev(orig, self, *args, **kwargs):
        # When generating file blobs, taking the real path is too slow on large
        # repos, so force it to just return the linkrev directly.
        repo = self._repo
        if util.safehasattr(repo, b'forcelinkrev') and repo.forcelinkrev:
            return self._filelog.linkrev(self._filelog.rev(self._filenode))
        return orig(self, *args, **kwargs)

    extensions.wrapfunction(context.basefilectx, b'_adjustlinkrev',
                            _adjustlinkrev)

    def _iscmd(orig, cmd):
        # Hide the legacy ssh-only getfiles command from the http protocol.
        if cmd == b'x_rfl_getfiles':
            return False
        return orig(cmd)

    extensions.wrapfunction(wireprotoserver, b'iscmd', _iscmd)
def extsetup(ui):
    """Extension setup: intercept bookmark pushes."""
    wrap = extensions.wrapfunction
    wrap(exchange, '_pushbookmark', wrappedpushbookmark)
    # NOTE(review): this span is the tail of a 'configitems' wrapper whose
    # header lies outside this excerpt.
    repos = orig(self, section, *args, **kwargs)
    if section == "paths":
        repos += getzcpaths()
    return repos

def defaultdest(orig, source):
    # Map a zeroconf URL back to its advertised name for the default
    # clone destination.
    for name, path in getzcpaths():
        if path == source:
            return name.encode(encoding.encoding)
    return orig(source)

def cleanupafterdispatch(orig, ui, options, cmd, cmdfunc):
    try:
        return orig(ui, options, cmd, cmdfunc)
    finally:
        # we need to call close() on the server to notify() the various
        # threading Conditions and allow the background threads to exit
        global server
        if server:
            server.close()

# Module-level hook installation.
# NOTE(review): 'config' and 'configitems' are defined elsewhere in this
# module, outside this excerpt.
extensions.wrapfunction(dispatch, '_runcommand', cleanupafterdispatch)
extensions.wrapfunction(ui.ui, 'config', config)
extensions.wrapfunction(ui.ui, 'configitems', configitems)
extensions.wrapfunction(hg, 'defaultdest', defaultdest)
extensions.wrapfunction(servermod, 'create_server', zc_create_server)
def clone(orig, ui, source, dest=None, **opts):
    """
    Some of the options listed below only apply to Subversion
    %(target)s. See 'hg help %(extension)s' for more information on
    them as well as other ways of customising the conversion process.
    """
    data = {}
    def hgclonewrapper(orig, ui, *args, **opts):
        # Capture the (srcrepo, dstrepo) pair hg.clone returns so the outer
        # function can post-process the destination.
        origsource = args[1]

        if isinstance(origsource, str):
            source, branch, checkout = util.parseurl(
                ui.expandpath(origsource), opts.get('branch'))
            srcrepo = getpeer(ui, opts, source)
        else:
            srcrepo = origsource

        if srcrepo.capable('subversion'):
            branches = opts.pop('branch', None)
            if branches:
                data['branches'] = branches
                ui.setconfig('hgsubversion', 'branch', branches[-1])

        data['srcrepo'], data['dstrepo'] = orig(ui, *args, **opts)

        return data['srcrepo'], data['dstrepo']

    # Translate command-line options into configuration entries.
    for opt, (section, name) in optionmap.iteritems():
        if opt in opts and opts[opt]:
            ui.setconfig(section, name, str(opts.pop(opt)))

    # calling hg.clone directly to get the repository instances it returns,
    # breaks in subtle ways, so we double-wrap
    orighgclone = None
    try:
        orighgclone = extensions.wrapfunction(hg, 'clone', hgclonewrapper)
        orig(ui, source, dest, **opts)
    finally:
        if orighgclone:
            hg.clone = orighgclone

    # do this again; the ui instance isn't shared between the wrappers
    if data.get('branches'):
        ui.setconfig('hgsubversion', 'branch', data['branches'][-1])

    dstrepo = data.get('dstrepo')
    srcrepo = data.get('srcrepo')
    dst = dstrepo.local()

    if dstrepo.local() and srcrepo.capable('subversion'):
        dst = dstrepo.local()
        try:
            # hg before 4.5 requires text=True here
            fd = dst.vfs("hgrc", "a", text=True)
        except TypeError:
            fd = dst.vfs("hgrc", "a")
        # Persist the conversion-relevant config sections into the clone's
        # hgrc.
        preservesections = set(s for s, v in optionmap.itervalues())
        preservesections |= extrasections
        for section in preservesections:
            config = dict(ui.configitems(section))
            for name in dontretain[section]:
                config.pop(name, None)

            if config:
                fd.write('\n[%s]\n' % section)
                # NOTE(review): 'fd' is not closed within this excerpt; the
                # function appears to continue past it — confirm fd.close()
                # exists downstream.
                map(fd.write, ('%s = %s\n' % p for p in config.iteritems()))