def __init__(self, ui, repotype, path):
    """Set up an svn conversion sink rooted at *path*.

    Locates an existing svn working copy at *path*, or (for a local,
    non-URL path) creates a fresh svn repository with ``svnadmin`` and
    checks out a working copy next to the current directory.  Finally
    records the repository uuid from ``svn info``.
    """
    converter_sink.__init__(self, ui, repotype, path)
    commandline.__init__(self, ui, "svn")
    # Pending operations accumulated between commits.
    self.delete = []
    self.setexec = []
    self.delexec = []
    self.copies = []
    self.wc = None  # working-copy root, set below
    self.cwd = pycompat.getcwd()

    created = False
    if os.path.isfile(os.path.join(path, ".svn", "entries")):
        # *path* already is a checked-out working copy: just refresh it.
        self.wc = os.path.realpath(path)
        self.run0("update")
    else:
        if not re.search(r"^(file|http|https|svn|svn\+ssh)\://", path):
            # Plain filesystem path, not a URL.
            path = os.path.realpath(path)
            if os.path.isdir(os.path.dirname(path)):
                if not os.path.exists(os.path.join(path, "db", "fs-type")):
                    # No svn repository here yet; create one.
                    ui.status(
                        _("initializing svn repository %r\n")
                        % os.path.basename(path)
                    )
                    commandline(ui, "svnadmin").run0("create", path)
                    created = path
                # Turn the local path into a file:// URL for checkout.
                path = util.normpath(path)
                if not path.startswith("/"):
                    path = "/" + path
                path = "file://" + path
        # Check out a working copy beside the current directory.
        wcpath = os.path.join(pycompat.getcwd(), os.path.basename(path) + "-wc")
        ui.status(
            _("initializing svn working copy %r\n") % os.path.basename(wcpath)
        )
        self.run0("checkout", path, wcpath)
        self.wc = wcpath
    self.opener = vfsmod.vfs(self.wc)
    self.wopener = vfsmod.vfs(self.wc)
    self.childmap = mapfile(ui, self.join("hg-childmap"))
    # Only track executable bits when the filesystem supports them.
    if util.checkexec(self.wc):
        self.is_exec = util.isexec
    else:
        self.is_exec = None

    if created:
        # Install a pre-revprop-change hook so revprops (dates, authors)
        # can be set on the freshly created repository.
        hook = os.path.join(created, "hooks", "pre-revprop-change")
        fp = open(hook, "w")
        fp.write(pre_revprop_change)
        fp.close()
        util.setflags(hook, False, True)

    output = self.run0("info")
    # uuid_re is defined on the class; extract the repository uuid.
    self.uuid = self.uuid_re.search(output).group(1).strip()
def graphqlgetdiff(repo, diffid):
    """Resolve a Phabricator Diff number to information about its latest version.

    Under tests, a response is synthesized from the ``phrevset.mock-D<diffid>``
    config value instead of calling the GraphQL service.
    """
    if util.istest():
        hexnode = repo.ui.config("phrevset", "mock-D%s" % diffid)
        if hexnode:
            # Shape mirrors what the real GraphQL endpoint returns.
            commits = {hexnode: {"commit": hexnode, "rev": hexnode}}
            node = {
                "property_name": "local:commits",
                "property_value": json.dumps(commits),
            }
            return {
                "source_control_system": "hg",
                "description": "Commit rCALLSIGN{}".format(hexnode),
                "phabricator_version_properties": {"edges": [{"node": node}]},
            }

    timeout = repo.ui.configint("ssl", "timeout", 10)
    ca_certs = repo.ui.configpath("web", "cacerts")
    try:
        client = graphql.Client(
            repodir=pycompat.getcwd(), ca_bundle=ca_certs, repo=repo
        )
        return client.getdifflatestversion(timeout, diffid)
    except Exception as e:
        raise error.Abort(
            "Could not call phabricator graphql API: %s" % e,
            hint="perhaps you need to connect to the VPN or run 'jf auth'?",
        )
def _findbundle(repo, rev):
    """Return the backup bundle that contains the given rev.

    If found, returns ``(peer, node)`` — the bundle peer and the full binary
    rev hash.  If not found, returns ``(None, rev)`` with the given rev value
    unchanged.
    """
    ui = repo.ui
    backuppath = repo.localvfs.join("strip-backup")
    # Newest bundles first, so the most recent backup wins.
    # NOTE: the original used `filter(...)` followed by `.sort()`, which
    # fails on Python 3 where `filter` returns an iterator; `sorted` works
    # on both.
    backups = sorted(
        filter(os.path.isfile, glob.glob(backuppath + "/*.hg")),
        key=os.path.getmtime,
        reverse=True,
    )
    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, pycompat.getcwd())
        source = ui.expandpath(source)
        source, branches = hg.parseurl(source)
        other = hg.peer(repo, {}, source)
        quiet = ui.quiet
        try:
            ui.quiet = True
            ret = bundlerepo.getremotechanges(ui, repo, other, None, None, None)
            # NOTE(review): `cleanupfn` is never called, so temporary bundle
            # state may be leaked — confirm intent before changing.
            localother, chlist, cleanupfn = ret
            for node in chlist:
                if hex(node).startswith(rev):
                    return other, node
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet
    return None, rev
def _execute(ui, repo, target=None):
    """Run the repo-specific ``stablerev.script`` and return its
    ``(stdout, stderr)`` byte tuple.

    Raises ConfigError if the script is not configured, and Abort if the
    script cannot be launched.
    """
    script = ui.config("stablerev", "script")
    if script is None:
        raise error.ConfigError(_("must set stablerev.script"))

    # Pass '--target $TARGET' for compatibility.
    # XXX: Remove this once the new code has been rolled out for some time.
    if target is not None:
        script += " --target %s" % util.shellquote(target)
    try:
        ui.debug("repo-specific script for stable: %s\n" % script)
        reporoot = repo.wvfs.join("")
        env = encoding.environ.copy()
        env.update({"REAL_CWD": pycompat.getcwd(), "HG_ROOT": reporoot})
        if target is not None:
            env["TARGET"] = target
        ui.debug("setting current working directory to: %s\n" % reporoot)
        p = subprocess.Popen(
            script,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            close_fds=util.closefds,
            cwd=reporoot,
            env=env,
        )
        res = p.communicate()
        ui.debug("stable script returns: %r\n" % (res, ))
        return res
    # BUGFIX: Popen/communicate never raise CalledProcessError (only the
    # check_* helpers do), so the original handler was unreachable.  Popen
    # raises OSError when the shell cannot be spawned; catch that too so the
    # intended Abort actually fires.  CalledProcessError is kept for
    # backward compatibility.
    except (OSError, subprocess.CalledProcessError) as e:
        raise error.Abort(_("couldn't fetch stable rev: %s") % e)
def conflictsmsg(repo, ui):
    """Warn the user about any unresolved merge conflicts.

    Does nothing when no merge is in progress.  Paths are shown relative to
    the current working directory.
    """
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    matcher = scmutil.match(repo[None])
    cwd = pycompat.getcwd()
    unresolvedlist = [
        path for path in mergestate if matcher(path) and mergestate[path] == "u"
    ]
    if not unresolvedlist:
        msg = _("No unresolved merge conflicts.")
    else:
        entries = (
            "   %s" % os.path.relpath(os.path.join(repo.root, path), cwd)
            for path in unresolvedlist
        )
        mergeliststr = "\n".join(entries)
        template = _(
            """Unresolved merge conflicts: %s To mark files as resolved:  hg resolve --mark FILE"""
        )
        msg = template % mergeliststr
    ui.warn(prefixlines(msg))
def getdiff(repo, diffid):
    """Resolve a Phabricator Diff number to information about its latest
    version via the GraphQL API."""
    timeout = repo.ui.configint("ssl", "timeout", 10)
    ca_certs = repo.ui.configpath("web", "cacerts")
    try:
        client = graphql.Client(
            repodir=pycompat.getcwd(), ca_bundle=ca_certs, repo=repo
        )
        return client.getdifflatestversion(timeout, diffid)
    except Exception as e:
        # Surface any failure (network, auth, parsing) as a single Abort.
        raise error.Abort(
            "Could not call phabricator graphql API: %s" % e,
            hint="perhaps you need to run 'jf auth'?",
        )
def getdiffstatus(repo, *diffid):
    """Query Phabricator for the status of each given diff.

    Returns a list parallel to *diffid*; entries for which no information
    came back are the string ``"Error"``.  Returns ``[]`` when called with
    no diff ids.
    """
    if not diffid:
        return []
    timeout = repo.ui.configint("ssl", "timeout", 10)
    signalstatus = repo.ui.configbool("ssl", "signal_status", True)
    ca_certs = repo.ui.configpath("web", "cacerts")

    try:
        client = graphql.Client(repodir=pycompat.getcwd(), ca_bundle=ca_certs, repo=repo)
        statuses = client.getrevisioninfo(timeout, signalstatus, diffid)
    except arcconfig.ArcConfigError as ex:
        return _fail(
            repo,
            diffid,
            _("arcconfig configuration problem. No diff information can be provided.\n"),
            _("Error info: %s\n") % str(ex),
        )
    except (graphql.ClientError, ssl.SSLError, socket.timeout) as ex:
        return _fail(
            repo,
            diffid,
            _("Error talking to phabricator. No diff information can be provided.\n"),
            _("Error info: %s\n") % str(ex),
        )
    except ValueError as ex:
        return _fail(
            repo,
            diffid,
            _("Error decoding GraphQL response. No diff information can be provided.\n"),
            _("Error info: %s\n") % str(ex),
        )

    # This makes the code more robust in case we don't learn about any
    # particular revision: missing/empty responses become "Error".
    return [statuses.get(str(diff)) or "Error" for diff in diffid]
def _matchpaths(repo, rev, pats, opts, aopts=facontext.defaultopts):
    """generate paths matching given patterns"""
    perfhack = repo.ui.configbool("fastannotate", "perfhack")

    # disable perfhack if:
    # a) any walkopt is used
    # b) if we treat pats as plain file names, some of them do not have
    #    corresponding linelog files
    if perfhack:
        # cwd relative to reporoot (repo.path is the .hg directory, so its
        # dirname is the working-copy root)
        reporoot = os.path.dirname(repo.path)
        reldir = os.path.relpath(pycompat.getcwd(), reporoot)
        if reldir == ".":
            reldir = ""
        if any(opts.get(o[1]) for o in commands.walkopts):  # a)
            perfhack = False
        else:  # b)
            # Normalize absolute patterns to repo-relative paths.
            relpats = [
                os.path.relpath(p, reporoot) if os.path.isabs(p) else p
                for p in pats
            ]
            # disable perfhack on '..' since it allows escaping from the repo
            if any(
                (
                    ".." in f
                    or not os.path.isfile(
                        facontext.pathhelper(repo, f, aopts).linelogpath
                    )
                )
                for f in relpats
            ):
                perfhack = False

    # perfhack: emit paths directly without checking with the manifest;
    # this can be incorrect if the rev does not have the file.
    if perfhack:
        for p in relpats:
            yield os.path.join(reldir, p)
    else:
        # Slow path: walk the changectx with a real matcher; unknown
        # patterns abort rather than being silently ignored.
        def bad(x, y):
            raise error.Abort("%s: %s" % (x, y))

        ctx = scmutil.revsingle(repo, rev)
        m = scmutil.match(ctx, pats, opts, badfn=bad)
        for p in ctx.walk(m):
            yield p
def __init__(self, repodir=None, ca_bundle=None, repo=None):
    """Initialize a Phabricator GraphQL client.

    Defaults for *repodir* and *ca_bundle* are derived from *repo* when it
    is provided; otherwise *repodir* falls back to the current directory.

    NOTE(review): ``repo.ui`` is used unconditionally further down (for
    arcrc_host and the graphql_* configs), so passing ``repo=None`` will
    raise AttributeError — confirm whether repo is effectively required.
    """
    if repo is not None:
        if repodir is None:
            repodir = repo.root
        if ca_bundle is None:
            ca_bundle = repo.ui.configpath("web", "cacerts")
    if not repodir:
        repodir = pycompat.getcwd()
    # Mock mode: canned responses are loaded from a JSON file instead of
    # talking to the service (used by tests).
    self._mock = "HG_ARC_CONDUIT_MOCK" in encoding.environ
    if self._mock:
        with open(encoding.environ["HG_ARC_CONDUIT_MOCK"], "r") as f:
            self._mocked_responses = json.load(f)
            # reverse since we want to use pop but still get items in
            # original order
            self._mocked_responses.reverse()

    self._host = None
    self._user = None
    self._cert = None
    self._oauth = None
    self._catslocation = None
    self._cats = None
    # `True` means "verify with default CAs" to the underlying transport.
    self.ca_bundle = ca_bundle or True
    self._applyarcconfig(arcconfig.loadforpath(repodir),
                         repo.ui.config("phabricator", "arcrc_host"))
    if not self._mock:
        app_id = repo.ui.config("phabricator", "graphql_app_id")
        app_token = repo.ui.config("phabricator", "graphql_app_token")
        self._host = repo.ui.config("phabricator", "graphql_host")
        if app_id is None or app_token is None or self._host is None:
            raise GraphQLConfigError(
                "GraphQL unavailable because of missing configuration")
        self._client = phabricator_graphql_client.PhabricatorGraphQLClient(
            phabricator_graphql_client_urllib.
            PhabricatorGraphQLClientRequests(),
            self._cert,
            self._oauth,
            self._cats,
            self._user,
            "phabricator",
            self._host,
            app_id,
            app_token,
        )
def _executescript(script, repo, extraenv=None):
    """Run *script* through the shell in the repo root and return its stdout.

    ``REAL_CWD`` and ``HG_ROOT`` (plus *extraenv*) are added to the
    environment.  Stderr is forwarded to the ui.  Raises Abort when the
    script cannot be launched or exits with a non-zero status.
    """
    ui = repo.ui
    ui.debug("Executing script: %s\n" % script)
    reporoot = repo.wvfs.join("")
    env = encoding.environ.copy()
    env.update({"REAL_CWD": pycompat.getcwd(), "HG_ROOT": reporoot})
    if extraenv:
        env.update(extraenv)
    ui.debug("setting current working directory to: %s\n" % reporoot)
    try:
        p = subprocess.Popen(
            args=script,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            close_fds=util.closefds,
            cwd=reporoot,
            env=env,
        )
        res = p.communicate()
    except subprocess.CalledProcessError as e:
        raise error.Abort(_("error executing script: %s") % e)
    stdout = pycompat.decodeutf8(res[0])
    stderr = pycompat.decodeutf8(res[1])
    ui.debug("script stdout:\n%s\n" % stdout)
    ui.write_err(stderr)
    if p.returncode:
        # BUGFIX: the original constructed error.Abort but never raised it,
        # so non-zero exit codes were silently ignored.
        raise error.Abort(
            _("script returned non-zero return code: %d") % p.returncode)
    return stdout
def __init__(self, ui, path):
    """Wrap the external ``repo`` tool as a commandline, remembering both
    the repository path and the directory we were launched from."""
    super(repo_commandline, self).__init__(ui, "repo")
    self.repopath = path
    self.cwd = pycompat.getcwd()
def _makerage(ui, repo, **opts):
    """Collect diagnostic information ("rage") for this repo and return it
    as one big report string.

    Runs a fixed list of quick "basic" probes and slower "detailed" probes
    (hg commands, shell commands, log files), each guarded by a timeout,
    and appends a timing profile plus footnotes for any failures.
    """
    # Make graphlog shorter.
    configoverrides = {("experimental", "graphshorten"): "1"}

    def hgcmd(cmdname, *args, **additional_opts):
        # Run an hg command in-process against a buffering copy of `ui`
        # and return its captured output.
        cmd, opts = cmdutil.getcmdanddefaultopts(cmdname, commands.table)
        opts.update(additional_opts)
        # `_repo` pseudo-option lets callers substitute e.g. the
        # unfiltered repo.
        _repo = repo
        if "_repo" in opts:
            _repo = opts["_repo"]
            del opts["_repo"]
        # If we failed to popbuffer for some reason, do not mess up with the
        # main `ui` object.
        newui = ui.copy()
        newui.pushbuffer(error=True)
        try:
            with ui.configoverride(configoverrides, "rage"):
                if cmd.norepo:
                    cmd(newui, *args, **opts)
                else:
                    cmd(newui, _repo, *args, **opts)
        finally:
            return newui.popbuffer()

    # Cheap, near-instant facts about the environment.
    basic = [
        ("date", lambda: time.ctime()),
        ("unixname", lambda: encoding.environ.get("LOGNAME")),
        ("hostname", lambda: socket.gethostname()),
        ("repo location", lambda: repo.root),
        ("cwd", lambda: pycompat.getcwd()),
        ("fstype", lambda: util.getfstype(repo.root)),
        ("active bookmark", lambda: bookmarks._readactive(repo, repo._bookmarks)),
        (
            "hg version",
            lambda: __import__(
                "edenscm.mercurial.__version__"
            ).mercurial.__version__.version,
        ),
        ("obsstore size", lambda: str(repo.svfs.stat("obsstore").st_size)),
    ]

    # Disable color so the report is plain text; restored at the end.
    oldcolormode = ui._colormode
    ui._colormode = None

    detailed = [
        ("df -h", lambda: shcmd("df -h", check=False)),
        # smartlog as the user sees it
        ("hg sl", lambda: hgcmd("smartlog", template="{sl_debug}")),
        # unfiltered smartlog for recent hidden changesets, including full
        # node identity
        (
            "hg sl --master='interestingmaster()' -r 'predecessors(draft())'",
            lambda: hgcmd(
                "smartlog",
                master="interestingmaster()",
                rev=["predecessors(draft())"],
                _repo=repo.unfiltered(),
                template='{sub("\\n", " ", "{node} {sl_debug}")}',
            ),
        ),
        (
            'first 20 lines of "hg status"',
            lambda: "\n".join(hgcmd("status").splitlines()[:20]),
        ),
        (
            "hg blackbox",
            lambda: "\n".join(
                hgcmd("blackbox", pattern=BLACKBOX_PATTERN).splitlines()[-500:]
            ),
        ),
        ("hg summary", lambda: hgcmd("summary")),
        ("hg cloud status", lambda: hgcmd("cloud status")),
        ("hg debugprocesstree", lambda: hgcmd("debugprocesstree")),
        ("hg config (local)", lambda: "\n".join(localconfig(ui))),
        ("hg sparse show", lambda: hgcmd("sparse show")),
        ("hg debuginstall", lambda: hgcmd("debuginstall")),
        ("usechg", (usechginfo)),
        (
            "uptime",
            lambda: shcmd(
                "wmic path Win32_OperatingSystem get LastBootUpTime"
                if pycompat.iswindows
                else "uptime"
            ),
        ),
        ("rpm info", (partial(rpminfo, ui))),
        ("klist", lambda: shcmd("klist", check=False)),
        ("ifconfig", lambda: shcmd("ipconfig" if pycompat.iswindows else "ifconfig")),
        (
            "airport",
            lambda: shcmd(
                "/System/Library/PrivateFrameworks/Apple80211."
                + "framework/Versions/Current/Resources/airport "
                + "--getinfo",
                check=False,
            ),
        ),
        (
            'last 100 lines of "hg debugobsolete"',
            lambda: "\n".join(hgcmd("debugobsolete").splitlines()[-100:]),
        ),
        ("infinitepush backup state", lambda: readinfinitepushbackupstate(repo)),
        ("commit cloud workspace sync state", lambda: readcommitcloudstate(repo)),
        (
            "infinitepush / commitcloud backup logs",
            lambda: infinitepushbackuplogs(ui, repo),
        ),
        ("scm daemon logs", lambda: scmdaemonlog(ui, repo)),
        ("debugstatus", lambda: hgcmd("debugstatus")),
        ("debugtree", lambda: hgcmd("debugtree")),
        ("hg config (overrides)", lambda: "\n".join(overriddenconfig(ui))),
        ("edenfs rage", lambda: shcmd("edenfsctl rage --stdout")),
        (
            "environment variables",
            lambda: "\n".join(
                sorted(["{}={}".format(k, v) for k, v in encoding.environ.items()])
            ),
        ),
        ("ssh config", lambda: shcmd("ssh -G hg.vip.facebook.com", check=False)),
    ]

    msg = ""
    if util.safehasattr(repo, "name"):
        # Add the contents of both local and shared pack directories.
        packlocs = {
            "local": lambda category: shallowutil.getlocalpackpath(
                repo.svfs.vfs.base, category
            ),
            "shared": lambda category: shallowutil.getcachepackpath(repo, category),
        }

        # NOTE(review): dict.iteritems() is Python 2 only; the sibling
        # version of this function uses pycompat.iteritems() — confirm the
        # intended Python version before changing.
        for loc, getpath in packlocs.iteritems():
            for category in constants.ALL_CATEGORIES:
                path = getpath(category)
                detailed.append(
                    (
                        "%s packs (%s)" % (loc, constants.getunits(category)),
                        # `path=path` binds the loop value at definition
                        # time (avoids the late-binding closure pitfall).
                        lambda path=path: "%s:\n%s"
                        % (
                            path,
                            shcmd(
                                "dir /o-s %s" % os.path.normpath(path)
                                if pycompat.iswindows
                                else "ls -lhS %s" % path
                            ),
                        ),
                    )
                )

    footnotes = []
    timeout = opts.get("timeout") or 20

    def _failsafe(gen, timeout=timeout):
        # Run `gen` on a daemon thread with a timeout; failures become
        # footnotes rather than aborting the whole rage.
        class TimedOut(RuntimeError):
            pass

        def target(result, gen):
            try:
                result.append(gen())
            except TimedOut:
                return
            except Exception as ex:
                index = len(footnotes) + 1
                footnotes.append(
                    "[%d]: %s\n%s\n\n" % (index, str(ex), traceback.format_exc())
                )
                result.append("(Failed. See footnote [%d])" % index)

        result = []
        thread = threading.Thread(target=target, args=(result, gen))
        thread.daemon = True
        thread.start()
        thread.join(timeout)
        if result:
            value = result[0]
            return value
        else:
            if thread.is_alive():
                # Attempt to stop the thread, since hg is not thread safe.
                # There is no pure Python API to interrupt a thread.
                # But CPython C API can do that.
                ctypes.pythonapi.PyThreadState_SetAsyncExc(
                    ctypes.c_long(thread.ident), ctypes.py_object(TimedOut)
                )
            return (
                "(Did not complete in %s seconds, rerun with a larger --timeout to collect this)"
                % timeout
            )

    msg = []
    profile = []
    allstart = time.time()
    for name, gen in basic:
        msg.append("%s: %s\n\n" % (name, _failsafe(gen)))
    profile.append((time.time() - allstart, "basic info", None))
    for name, gen in detailed:
        start = time.time()
        with progress.spinner(ui, "collecting %r" % name):
            value = _failsafe(gen)
        finish = time.time()
        msg.append(
            "%s: (%.2f s)\n---------------------------\n%s\n\n"
            % (name, finish - start, value)
        )
        profile.append((finish - start, name, value.count("\n")))
    allfinish = time.time()
    profile.append((allfinish - allstart, "total time", None))
    # Append the timing profile, slowest probes first.
    msg.append("hg rage profile:\n")
    width = max([len(name) for _t, name, _l in profile])
    for timetaken, name, lines in reversed(sorted(profile)):
        m = " %-*s %8.2f s" % (width + 1, name + ":", timetaken)
        if lines is not None:
            msg.append("%s for %4d lines\n" % (m, lines))
        else:
            msg.append("%s\n" % m)
    msg.append("\n")
    msg.extend(footnotes)
    msg = "".join(msg)

    ui._colormode = oldcolormode
    return msg
def _makerage(ui, repo, **opts):
    """Collect diagnostic information ("rage") for this repo and return it
    as one big UTF-8 report string.

    Runs quick "basic" probes and slower "detailed" probes (hg commands,
    shell commands, log files), each guarded by a timeout, and appends a
    timing profile plus footnotes for any failures.
    """
    configoverrides = {
        # Make graphlog shorter.
        ("experimental", "graphshorten"): "1",
        # Force use of lines-square renderer, as the user's configuration may
        # not render properly in a text file.
        ("experimental", "graph.renderer"): "lines-square",
        # Reduce the amount of data used for debugnetwork speed tests to
        # increase the chance they complete within 20s.
        ("debugnetwork", "speed-test-download-size"): "4M",
        ("debugnetwork", "speed-test-upload-size"): "1M",
    }

    # Override the encoding to "UTF-8" to generate the rage in UTF-8.
    oldencoding = encoding.encoding
    oldencodingmode = encoding.encodingmode
    encoding.encoding = "UTF-8"
    encoding.encodingmode = "replace"

    def hgcmd(cmdname, *args, **additional_opts):
        # Run an hg command in-process against a buffering copy of `ui`
        # and return its captured output.
        cmd, opts = cmdutil.getcmdanddefaultopts(cmdname, commands.table)
        opts.update(additional_opts)
        # `_repo` pseudo-option lets callers substitute another repo object.
        _repo = repo
        if "_repo" in opts:
            _repo = opts["_repo"]
            del opts["_repo"]
        # If we failed to popbuffer for some reason, do not mess up with the
        # main `ui` object.
        newui = ui.copy()
        newui.pushbuffer(error=True, subproc=True)
        newui._colormode = None

        def remoteui(orig, src, opts):
            # Redirect remote peer output into our capture buffer too.
            rui = orig(src, opts)
            rui._outputui = newui
            return rui

        try:
            with newui.configoverride(configoverrides, "rage"), extensions.wrappedfunction(
                    hg, "remoteui", remoteui):
                if cmd.norepo:
                    cmd(newui, *args, **opts)
                else:
                    cmd(newui, _repo, *args, **opts)
        finally:
            return newui.popbuffer()

    # Cheap, near-instant facts about the environment.
    basic = [
        ("date", lambda: time.ctime()),
        ("unixname", lambda: encoding.environ.get("LOGNAME")),
        ("hostname", lambda: socket.gethostname()),
        ("repo location", lambda: repo.root),
        ("cwd", lambda: pycompat.getcwd()),
        ("fstype", lambda: util.getfstype(repo.root)),
        ("active bookmark", lambda: bookmarks._readactive(repo, repo._bookmarks)),
        (
            "hg version",
            lambda: __import__(
                "edenscm.mercurial.__version__"
            ).mercurial.__version__.version,
        ),
    ]

    def _edenfs_rage():
        ragecmd = "edenfsctl rage --stdout"
        if opts.get("preview"):
            return shcmd(ragecmd + " --dry-run")
        return shcmd(ragecmd)

    detailed = [
        (
            "disk space usage",
            lambda: shcmd(
                "wmic LogicalDisk Where DriveType=3 Get DeviceId,FileSystem,FreeSpace,Size"
                if pycompat.iswindows
                else "df -h",
                check=False,
            ),
        ),
        # smartlog as the user sees it
        ("hg sl", lambda: hgcmd("smartlog", template="{sl_debug}")),
        (
            "hg debugmetalog -t 'since 2d ago'",
            lambda: hgcmd("debugmetalog", time_range=["since 2d ago"]),
        ),
        (
            'first 20 lines of "hg status"',
            lambda: "\n".join(hgcmd("status").splitlines()[:20]),
        ),
        (
            "hg debugmutation -r 'draft() & date(-4)' -t 'since 4d ago'",
            lambda: hgcmd("debugmutation", rev=["draft() & date(-4)"],
                          time_range=["since 4d ago"]),
        ),
        (
            "hg bookmarks --list-subscriptions",
            lambda: hgcmd("bookmarks", list_subscriptions=True),
        ),
        ("sigtrace", lambda: readsigtraces(repo)),
        (
            "hg blackbox",
            lambda: "\n".join(
                hgcmd("blackbox", pattern=BLACKBOX_PATTERN).splitlines()[-500:]
            ),
        ),
        ("hg summary", lambda: hgcmd("summary")),
        ("hg cloud status", lambda: hgcmd("cloud status")),
        ("hg debugprocesstree", lambda: hgcmd("debugprocesstree")),
        ("hg config (local)", lambda: "\n".join(localconfig(ui))),
        ("hg sparse", lambda: hgcmd("sparse")),
        ("hg debugchangelog", lambda: hgcmd("debugchangelog")),
        ("hg debugexpandpaths", lambda: hgcmd("debugexpandpaths")),
        ("hg debuginstall", lambda: hgcmd("debuginstall")),
        ("hg debugdetectissues", lambda: hgcmd("debugdetectissues")),
        ("usechg", usechginfo),
        (
            "uptime",
            lambda: shcmd("wmic path Win32_OperatingSystem get LastBootUpTime"
                          if pycompat.iswindows else "uptime"),
        ),
        ("rpm info", (partial(rpminfo, ui))),
        ("klist", lambda: shcmd("klist", check=False)),
        ("ifconfig", lambda: shcmd("ipconfig" if pycompat.iswindows else "ifconfig")),
        (
            "airport",
            lambda: shcmd(
                "/System/Library/PrivateFrameworks/Apple80211."
                + "framework/Versions/Current/Resources/airport "
                + "--getinfo",
                check=False,
            ),
        ),
        ("hg debugnetwork", lambda: hgcmd("debugnetwork")),
        ("infinitepush backup state", lambda: readinfinitepushbackupstate(repo)),
        ("commit cloud workspace sync state", lambda: readcommitcloudstate(repo)),
        (
            "infinitepush / commitcloud backup logs",
            lambda: infinitepushbackuplogs(ui, repo),
        ),
        ("scm daemon logs", lambda: scmdaemonlog(ui, repo)),
        ("debugstatus", lambda: hgcmd("debugstatus")),
        ("debugtree", lambda: hgcmd("debugtree")),
        ("hg config (all)", lambda: "\n".join(allconfig(ui))),
        ("edenfs rage", _edenfs_rage),
        (
            "environment variables",
            lambda: "\n".join(
                sorted([
                    "{}={}".format(k, v) for k, v in encoding.environ.items()
                ])),
        ),
        ("ssh config", lambda: shcmd("ssh -G hg.vip.facebook.com", check=False)),
        ("debuglocks", lambda: hgcmd("debuglocks")),
        ("x2pagentd info", lambda: checkproxyagentstate(ui)),
    ]

    msg = ""
    if util.safehasattr(repo, "name"):
        # Add the contents of both local and shared pack directories.
        packlocs = {
            "local": lambda category: shallowutil.getlocalpackpath(
                repo.svfs.vfs.base, category),
            "shared": lambda category: shallowutil.getcachepackpath(repo, category),
        }

        for loc, getpath in pycompat.iteritems(packlocs):
            for category in constants.ALL_CATEGORIES:
                path = getpath(category)
                # `path=path` binds the loop value at definition time
                # (avoids the late-binding closure pitfall).
                detailed.append((
                    "%s packs (%s)" % (loc, constants.getunits(category)),
                    lambda path=path: "%s:\n%s" % (
                        path,
                        shcmd("dir /o-s %s" % os.path.normpath(path)
                              if pycompat.iswindows else "ls -lhS %s" % path),
                    ),
                ))

    footnotes = []
    timeout = opts.get("timeout") or 20

    def _failsafe(gen, timeout=timeout):
        # Run `gen` on a daemon thread with a timeout; failures become
        # footnotes rather than aborting the whole rage.
        class TimedOut(RuntimeError):
            pass

        def target(result, gen):
            try:
                result.append(gen())
            except TimedOut:
                return
            except Exception as ex:
                index = len(footnotes) + 1
                footnotes.append("[%d]: %s\n%s\n\n"
                                 % (index, str(ex), traceback.format_exc()))
                result.append("(Failed. See footnote [%d])" % index)

        result = []
        thread = threading.Thread(target=target, args=(result, gen))
        thread.daemon = True
        thread.start()
        thread.join(timeout)
        if result:
            value = result[0]
            return value
        else:
            if thread.is_alive():
                # Attempt to stop the thread, since hg is not thread safe.
                # There is no pure Python API to interrupt a thread.
                # But CPython C API can do that.
                ctypes.pythonapi.PyThreadState_SetAsyncExc(
                    ctypes.c_long(thread.ident), ctypes.py_object(TimedOut))
            return (
                "(Did not complete in %s seconds, rerun with a larger --timeout to collect this)"
                % timeout)

    msg = []
    profile = []
    allstart = time.time()
    for name, gen in basic:
        msg.append("%s: %s\n\n" % (name, _failsafe(gen)))
    profile.append((time.time() - allstart, "basic info", None))
    for name, gen in detailed:
        start = time.time()
        with progress.spinner(ui, "collecting %r" % name):
            value = _failsafe(gen)
        finish = time.time()
        msg.append("%s: (%.2f s)\n---------------------------\n%s\n\n"
                   % (name, finish - start, value))
        profile.append((finish - start, name, value.count("\n")))
    allfinish = time.time()
    profile.append((allfinish - allstart, "total time", None))
    # Append the timing profile, slowest probes first.
    msg.append("hg rage profile:\n")
    width = max([len(name) for _t, name, _l in profile])
    for timetaken, name, lines in reversed(sorted(profile)):
        m = " %-*s %8.2f s" % (width + 1, name + ":", timetaken)
        if lines is not None:
            msg.append("%s for %4d lines\n" % (m, lines))
        else:
            msg.append("%s\n" % m)
    msg.append("\n")
    msg.extend(footnotes)
    msg = "".join(msg)

    # Restore the encoding we overrode at the top.
    encoding.encoding = oldencoding
    encoding.encodingmode = oldencodingmode

    return msg
def _makerage(ui, repo, **opts):
    """Collect diagnostic information ("rage") for this repo and return it
    as one big UTF-8 report string.

    Runs quick "basic" probes and slower "detailed" probes (hg commands,
    shell commands, log files), each guarded by a timeout, and appends a
    timing profile plus footnotes for any failures.
    """
    configoverrides = {
        # Make graphlog shorter.
        ("experimental", "graphshorten"): "1",
        # Force use of lines-square renderer, as the user's configuration may
        # not render properly in a text file.
        ("experimental", "graph.renderer"): "lines-square",
        # Reduce the amount of data used for debugnetwork speed tests to
        # increase the chance they complete within 20s.
        ("debugnetwork", "speed-test-download-size"): "4M",
        ("debugnetwork", "speed-test-upload-size"): "1M",
    }
    # NOTE(review): `configoverrides` is not referenced by this version of
    # hgcmd (which shells out via bindings.commands.run) — confirm whether
    # the overrides should be passed through or the dict removed.

    # Override the encoding to "UTF-8" to generate the rage in UTF-8.
    oldencoding = encoding.encoding
    oldencodingmode = encoding.encodingmode
    encoding.encoding = "UTF-8"
    encoding.encodingmode = "replace"

    def hgcmd(cmdname, *args, **additional_opts):
        # Build an argv-style command line and run it through the native
        # command dispatcher, capturing combined stdout/stderr.
        cmdargs = ["hg", *cmdname.split(), *args]
        for flagname, flagvalue in additional_opts.items():
            flagname = flagname.replace("_", "-")
            if isinstance(flagvalue, list):
                cmdargs += [f"--{flagname}={v}" for v in flagvalue]
            else:
                cmdargs += [f"--{flagname}={flagvalue}"]
        fin = util.stringio()
        fout = ferr = util.stringio()
        status = bindings.commands.run(cmdargs, fin, fout, ferr)
        output = fout.getvalue().decode()
        if status != 0:
            # Record the exit code in the captured output.
            output += f"[{status}]\n"
        return output

    # Cheap, near-instant facts about the environment.
    basic = [
        ("date", lambda: time.ctime()),
        ("unixname", lambda: encoding.environ.get("LOGNAME")),
        ("hostname", lambda: socket.gethostname()),
        ("repo location", lambda: repo.root),
        ("cwd", lambda: pycompat.getcwd()),
        ("fstype", lambda: util.getfstype(repo.root)),
        ("active bookmark", lambda: bookmarks._readactive(repo, repo._bookmarks)),
        (
            "hg version",
            lambda: __import__(
                "edenscm.mercurial.__version__"
            ).mercurial.__version__.version,
        ),
    ]

    def _edenfs_rage():
        ragecmd = "edenfsctl rage --stdout"
        if opts.get("preview"):
            return shcmd(ragecmd + " --dry-run")
        return shcmd(ragecmd)

    detailed = [
        (
            "disk space usage",
            lambda: shcmd(
                "wmic LogicalDisk Where DriveType=3 Get DeviceId,FileSystem,FreeSpace,Size"
                if pycompat.iswindows
                else "df -h",
                check=False,
            ),
        ),
        # smartlog as the user sees it
        ("hg sl", lambda: hgcmd("smartlog", template="{sl_debug}")),
        (
            "hg debugmetalog -t 'since 2d ago'",
            lambda: hgcmd("debugmetalog", time_range=["since 2d ago"]),
        ),
        (
            'first 20 lines of "hg status"',
            lambda: "\n".join(hgcmd("status").splitlines()[:20]),
        ),
        (
            "hg debugmutation -r 'draft() & date(-4)' -t 'since 4d ago'",
            lambda: hgcmd(
                "debugmutation", rev=["draft() & date(-4)"], time_range=["since 4d ago"]
            ),
        ),
        (
            "hg bookmarks --list-subscriptions",
            lambda: hgcmd("bookmarks", list_subscriptions=True),
        ),
        ("sigtrace", lambda: readsigtraces(repo)),
        (
            "hg blackbox",
            lambda: "\n".join(
                hgcmd("blackbox", pattern=BLACKBOX_PATTERN).splitlines()[-500:]
            ),
        ),
        ("hg summary", lambda: hgcmd("summary")),
        ("hg cloud status", lambda: hgcmd("cloud status")),
        ("hg debugprocesstree", lambda: hgcmd("debugprocesstree")),
        ("hg config (local)", lambda: "\n".join(localconfig(ui))),
        ("hg sparse", lambda: hgcmd("sparse")),
        ("hg debugchangelog", lambda: hgcmd("debugchangelog")),
        ("hg debugexpandpaths", lambda: hgcmd("debugexpandpaths")),
        ("hg debuginstall", lambda: hgcmd("debuginstall")),
        ("hg debugdetectissues", lambda: hgcmd("debugdetectissues")),
        ("usechg", usechginfo),
        (
            "uptime",
            lambda: shcmd(
                "wmic path Win32_OperatingSystem get LastBootUpTime"
                if pycompat.iswindows
                else "uptime"
            ),
        ),
        ("rpm info", (partial(rpminfo, ui))),
        ("klist", lambda: shcmd("klist", check=False)),
        ("ifconfig", lambda: shcmd("ipconfig" if pycompat.iswindows else "ifconfig")),
        (
            "airport",
            lambda: shcmd(
                "/System/Library/PrivateFrameworks/Apple80211."
                + "framework/Versions/Current/Resources/airport "
                + "--getinfo",
                check=False,
            ),
        ),
        ("hg debugnetwork", lambda: hgcmd("debugnetwork")),
        ("hg debugnetworkdoctor", lambda: hgcmd("debugnetworkdoctor")),
        ("infinitepush backup state", lambda: readinfinitepushbackupstate(repo)),
        ("commit cloud workspace sync state", lambda: readcommitcloudstate(repo)),
        (
            "infinitepush / commitcloud backup logs",
            lambda: infinitepushbackuplogs(ui, repo),
        ),
        ("scm daemon logs", lambda: scmdaemonlog(ui, repo)),
        ("debugstatus", lambda: hgcmd("debugstatus")),
        ("debugtree", lambda: hgcmd("debugtree")),
        ("hg config (all)", lambda: "\n".join(allconfig(ui))),
        ("edenfs rage", _edenfs_rage),
        (
            "environment variables",
            lambda: "\n".join(
                sorted(["{}={}".format(k, v) for k, v in encoding.environ.items()])
            ),
        ),
        ("ssh config", lambda: shcmd("ssh -G hg.vip.facebook.com", check=False)),
        ("debuglocks", lambda: hgcmd("debuglocks")),
        ("x2pagentd info", lambda: checkproxyagentstate(ui)),
        ("sks-agent rage", lambda: sksagentrage(ui)),
    ]

    msg = ""

    footnotes = []
    timeout = opts.get("timeout") or 20

    def _failsafe(gen, timeout=timeout):
        # Run `gen` on a daemon thread with a timeout; failures become
        # footnotes rather than aborting the whole rage.
        class TimedOut(RuntimeError):
            pass

        def target(result, gen):
            try:
                result.append(gen())
            except TimedOut:
                return
            except Exception as ex:
                index = len(footnotes) + 1
                footnotes.append(
                    "[%d]: %s\n%s\n\n" % (index, str(ex), traceback.format_exc())
                )
                result.append("(Failed. See footnote [%d])" % index)

        result = []
        thread = threading.Thread(target=target, args=(result, gen))
        thread.daemon = True
        thread.start()
        thread.join(timeout)
        if result:
            value = result[0]
            return value
        else:
            if thread.is_alive():
                # Attempt to stop the thread, since hg is not thread safe.
                # There is no pure Python API to interrupt a thread.
                # But CPython C API can do that.
                ctypes.pythonapi.PyThreadState_SetAsyncExc(
                    ctypes.c_long(thread.ident), ctypes.py_object(TimedOut)
                )
            return (
                "(Did not complete in %s seconds, rerun with a larger --timeout to collect this)"
                % timeout
            )

    msg = []
    profile = []
    allstart = time.time()
    for name, gen in basic:
        msg.append("%s: %s\n\n" % (name, _failsafe(gen)))
    profile.append((time.time() - allstart, "basic info", None))
    for name, gen in detailed:
        start = time.time()
        with progress.spinner(ui, name):
            value = _failsafe(gen)
        finish = time.time()
        msg.append(
            "%s: (%.2f s)\n---------------------------\n%s\n\n"
            % (name, finish - start, value)
        )
        profile.append((finish - start, name, value.count("\n")))
    allfinish = time.time()
    profile.append((allfinish - allstart, "total time", None))
    # Append the timing profile, slowest probes first.
    msg.append("hg rage profile:\n")
    width = max([len(name) for _t, name, _l in profile])
    for timetaken, name, lines in reversed(sorted(profile)):
        m = " %-*s %8.2f s" % (width + 1, name + ":", timetaken)
        if lines is not None:
            msg.append("%s for %4d lines\n" % (m, lines))
        else:
            msg.append("%s\n" % m)
    msg.append("\n")
    msg.extend(footnotes)
    msg = "".join(msg)

    # Restore the encoding we overrode at the top.
    encoding.encoding = oldencoding
    encoding.encodingmode = oldencodingmode

    return msg
def __init__(self, state, command):
    """Initialize like the base class, additionally remembering the
    directory the command was invoked from."""
    invocation_cwd = pycompat.getcwd()
    super(executerelative, self).__init__(state, command)
    self.cwd = invocation_cwd