def _makenodes(data):
    nodes = {}
    for nodeinfo in data["nodes"]:
        node = ensurestr(nodeinfo["node"])
        parents = [encodeutf8(ensurestr(p)) for p in nodeinfo["parents"]]
        bookmarks = [ensurestr(b) for b in nodeinfo["bookmarks"]]
        author = ensurestr(nodeinfo["author"])
        date = int(nodeinfo["date"])
        message = ensurestr(nodeinfo["message"])
        phase = ensurestr(nodeinfo["phase"])
        nodes[encodeutf8(node)] = NodeInfo(
            node, bookmarks, parents, author, date, message, phase
        )
    return nodes

def _settokentofile(self, token, isbackedup=False):
    """On all platforms except macOS, tokens are stored in a file"""
    # back up the token if the optional backup is enabled
    if self.usesecretstool and not isbackedup:
        try:
            self._settokeninsecretstool(token)
            isbackedup = True
        except Exception:
            pass
    with self.vfs.open(self.filename, "wb") as configfile:
        configfile.write(
            b"[commitcloud]\nuser_token=%s\nbackedup=%s\n"
            % (pycompat.encodeutf8(token), pycompat.encodeutf8(str(isbackedup)))
        )

def _send(self, path, data):
    # Track the last retriable error so we can re-raise it if all
    # retries are exhausted. (The previous `e = None` was never set by the
    # retriable branch, so the final check could silently return None.)
    lastretriableex = None
    rdata = None
    # print the request if debugrequests and debug are both on
    if self.debugrequests:
        self.ui.debug("%s\n" % json.dumps(cleandict(data)))
    if self._getheader("Content-Encoding") == "gzip":
        buffer = util.stringio()
        with gzip.GzipFile(fileobj=buffer, mode="w") as compressed:
            compressed.write(pycompat.encodeutf8(json.dumps(data)))
            compressed.flush()
        rdata = buffer.getvalue()
    else:
        rdata = pycompat.encodeutf8(json.dumps(data))

    # exponential backoff on failure: 1s, 2s, 4s, 8s, 16s, etc.
    sl = 1

    for attempt in range(MAX_CONNECT_RETRIES):
        try:
            self.connection.request("POST", path, rdata, self.headers)
            resp = self.connection.getresponse()

            if resp.status == httplib.UNAUTHORIZED:
                raise ccerror.RegistrationError(
                    self.ui, _("unauthorized client (token is invalid)")
                )
            if resp.status != httplib.OK:
                raise ccerror.ServiceError(
                    self.ui, "%d %s" % (resp.status, resp.reason)
                )

            if resp.getheader("Content-Encoding") == "gzip":
                resp = gzip.GzipFile(fileobj=util.stringio(resp.read()))
            data = json.load(resp)
            # print the response if debugrequests and debug are both on
            if self.debugrequests:
                self.ui.debug("%s\n" % json.dumps(cleandict(data)))
            return data
        except httplib.HTTPException as ex:
            # retriable: reconnect and try again after the backoff delay
            lastretriableex = ex
            self.connection.close()
            self.connection.connect()
        except (socket.timeout, socket.gaierror) as e:
            raise error.Abort(
                _("network error: %s") % e,
                hint=_("check your network connection"),
            )
        except socket.error as e:
            if "SSL" in str(e):
                raise ccerror.TLSAccessError(self.ui, str(e))
            raise ccerror.ServiceError(self.ui, str(e))
        except ssl.CertificateError as e:
            raise ccerror.TLSAccessError(self.ui, str(e))
        time.sleep(sl)
        sl *= 2

    if lastretriableex is not None:
        raise ccerror.ServiceError(self.ui, str(lastretriableex))

def _getannotate(repo, proto, path, lastnode):
    # Older fastannotate clients sent binary nodes. Newer fastannotate sends hex.
    if len(lastnode) == 40:
        lastnode = bin(lastnode)

    # output:
    #   FILE := vfspath + '\0' + str(size) + '\0' + content
    #   OUTPUT := '' | FILE + OUTPUT
    result = b""
    buildondemand = repo.ui.configbool("fastannotate", "serverbuildondemand", True)
    with context.annotatecontext(repo, path) as actx:
        if buildondemand:
            # update before responding to the client
            master = _getmaster(repo.ui)
            try:
                if not actx.isuptodate(master):
                    actx.annotate(master, master)
            except Exception:
                # non-fast-forward move or corrupted. rebuild automatically.
                actx.rebuild()
                try:
                    actx.annotate(master, master)
                except Exception:
                    actx.rebuild()  # delete files
            finally:
                # although the "with" context will also do a close/flush, we
                # need to do it early so we can send the correct response to
                # the client.
                actx.close()
        # send back the full content of revmap and linelog; in the future we
        # may want to do some rsync-like fancy updating.
        # the lastnode check is unnecessary if the client and the server agree
        # on where the main branch is.
        if actx.lastnode != lastnode:
            for p in [actx.revmappath, actx.linelogpath]:
                if not os.path.exists(p):
                    continue
                content = b""
                with open(p, "rb") as f:
                    content = f.read()
                vfsbaselen = len(repo.localvfs.base + "/")
                relpath = p[vfsbaselen:]
                result += b"%s\0%s\0%s" % (
                    pycompat.encodeutf8(relpath),
                    pycompat.encodeutf8(str(len(content))),
                    content,
                )
    return result

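# A minimal client-side sketch of parsing the FILE-framed payload that
# _getannotate produces, assuming only the record layout documented in the
# comment above (repeated "path\0size\0content" records). The helper name
# _parseannotatepayload is hypothetical, not part of the original module.
def _parseannotatepayload(payload):
    """Parse b"path\\0size\\0content" records into a {path: content} dict."""
    files = {}
    pos = 0
    while pos < len(payload):
        # path is NUL-terminated
        pathend = payload.index(b"\0", pos)
        path = payload[pos:pathend]
        # size is the ASCII decimal length of the content that follows
        sizeend = payload.index(b"\0", pathend + 1)
        size = int(payload[pathend + 1 : sizeend])
        files[path] = payload[sizeend + 1 : sizeend + 1 + size]
        pos = sizeend + 1 + size
    return files
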
def _createdatabase():
    schema = open(
        shlib.expandpath("$TESTDIR/hgsql/schema.%s.sql" % dbconfig["dbengine"]), "r"
    ).read()
    p = subprocess.Popen(
        [
            "mysql",
            "-h%s" % dbconfig["dbhost"],
            "-P%s" % dbconfig["dbport"],
            "-u%s" % dbconfig["dbuser"],
            "-p%s" % dbconfig["dbpass"],
        ],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    stdout, stderr = p.communicate(
        encodeutf8(
            r"""
CREATE DATABASE IF NOT EXISTS {dbname};
USE {dbname};
DROP TABLE IF EXISTS revisions;
DROP TABLE IF EXISTS revision_references;
DROP TABLE IF EXISTS repo_lock;
{schema}
""".format(
                dbname=dbconfig["dbname"], dbengine=dbconfig["dbengine"], schema=schema
            )
        )
    )
    if p.returncode != 0:
        raise RuntimeError("failed to create mysql database: %s\n%s" % (stdout, stderr))

def _checkoutlinelogwithedits(self):
    """() -> [str]. prompt all lines for edit"""
    alllines = self.linelog.getalllines()
    # header
    editortext = (
        _(
            'HG: editing %s\nHG: "y" means the line to the right '
            "exists in the changeset to the top\nHG:\n"
        )
        % self.fctxs[-1].path()
    )
    # [(idx, fctx)]. hide the dummy emptyfilecontext
    visiblefctxs = [
        (i, f)
        for i, f in enumerate(self.fctxs)
        if not isinstance(f, emptyfilecontext)
    ]
    for i, (j, f) in enumerate(visiblefctxs):
        editortext += _("HG: %s/%s %s %s\n") % (
            "|" * i,
            "-" * (len(visiblefctxs) - i + 1),
            node.short(f.node()),
            f.description().split("\n", 1)[0],
        )
    editortext += _("HG: %s\n") % ("|" * len(visiblefctxs))
    # figure out the lifetime of a line, this is relatively inefficient,
    # but probably fine
    lineset = defaultdict(lambda: set())  # {(llrev, linenum): {llrev}}
    for i, f in visiblefctxs:
        self.linelog.annotate((i + 1) * 2)
        for l in self.linelog.annotateresult:
            lineset[l].add(i)
    # append lines
    for l in alllines:
        editortext += "    %s : %s" % (
            "".join([("y" if i in lineset[l] else " ") for i, _f in visiblefctxs]),
            decodeutf8(self._getline(l)),
        )
    # run editor
    editedtext = self.ui.edit(editortext, "", action="absorb")
    if not editedtext:
        raise error.Abort(_("empty editor text"))
    # parse edited result
    contents = [b"" for i in self.fctxs]
    leftpadpos = 4
    colonpos = leftpadpos + len(visiblefctxs) + 1
    for l in editedtext.splitlines(True):
        if l.startswith("HG:"):
            continue
        if l[colonpos - 1 : colonpos + 2] != " : ":
            raise error.Abort(_("malformed line: %s") % l)
        linecontent = encodeutf8(l[colonpos + 2 :])
        for i, ch in enumerate(l[leftpadpos : colonpos - 1]):
            if ch == "y":
                contents[visiblefctxs[i][0]] += linecontent
    # chunkstats is hard to calculate if anything changes, therefore
    # set them to just a simple value (1, 1).
    if editedtext != editortext:
        self.chunkstats = [1, 1]
    return contents

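# An illustration (made up, not from the original source) of the editor text
# this builds for three visible file contexts. Each flag column answers "does
# this line exist in the changeset that column's graph row points to?", and
# the parser above relies on the 4-space left pad and the " : " separator:
#
#   HG: editing path/to/file
#   HG: "y" means the line to the right exists in the changeset to the top
#   HG:
#   HG: /---- aaaaaaaaaaaa commit one
#   HG: |/--- bbbbbbbbbbbb commit two
#   HG: ||/-- cccccccccccc commit three
#   HG: |||
#       yyy : a line present in all three changesets
#       yy  : a line removed by the third changeset
#         y : a line introduced by the third changeset
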
def todict(self):
    self._check()
    if self.content is not None:
        return {b"content": self.content}
    if self.oid is not None:
        return {b"oid": encodeutf8(self.oid)}
    return None

def get_unseen_commits(todo):
    """get all unseen commits reachable from todo in topological order

    'unseen' means not reachable from the done set and not in the git map.
    Mutates todo and the done set in the process."""
    commits = []
    while todo:
        sha = todo[-1]
        if sha in done or git_map.lookupbyfirst(bin(sha)) is not None:
            todo.pop()
            continue
        if sha in commit_cache:
            obj = commit_cache[sha]
        else:
            obj = git_object_store[pycompat.encodeutf8(sha)]
            commit_cache[sha] = obj
        assert isinstance(obj, Commit)
        for p in obj.parents:
            p = pycompat.decodeutf8(p)
            if p not in done and git_map.lookupbyfirst(bin(p)) is None:
                todo.append(p)
                # process parents of a commit before processing the
                # commit itself, and come back to this commit later
                break
        else:
            commits.append(sha)
            done.add(sha)
            todo.pop()
    return commits

def _batchrequest(self, pointers, action):
    """Get metadata about objects pointed by pointers for given action

    Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
    See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
    """
    self.ui.log("lfs_url", lfs_url=self.baseurl)
    objects = [{"oid": p.oid(), "size": p.size()} for p in pointers]
    requestdata = pycompat.encodeutf8(
        json.dumps({"objects": objects, "operation": action})
    )
    batchreq = util.urlreq.request(
        "%s/objects/batch" % self.baseurl, data=requestdata
    )
    batchreq.add_header("Accept", "application/vnd.git-lfs+json")
    batchreq.add_header("Content-Type", "application/vnd.git-lfs+json")
    self._addextraheaders(batchreq)
    try:
        res = self.urlopener.open(batchreq)
        server = res.info().get("server")
        self.ui.log("lfs_server", lfs_server=server)
        rawjson = res.read()
    except util.urlerr.httperror as ex:
        raise LfsRemoteError(_("LFS HTTP error: %s (action=%s)") % (ex, action))
    try:
        response = json.loads(rawjson)
    except ValueError:
        raise LfsRemoteError(_("LFS server returns invalid JSON: %s") % rawjson)
    return response

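# For reference, a successful batch response follows the shape described in
# the git-lfs batch API docs linked above; the exact fields a given server
# fills in may vary, so treat this as an illustrative example only:
#
#   {
#       "transfer": "basic",
#       "objects": [
#           {
#               "oid": "31c08...",
#               "size": 12,
#               "actions": {
#                   "download": {"href": "https://lfs.example.com/objects/31c08..."}
#               }
#           }
#       ]
#   }
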
def mcset(key, value, ui):
    """Use local mcrouter to set a key to memcache"""
    if type(key) != str:
        raise ValueError("Key must be a string")
    if type(value) != bytes:
        raise ValueError("Value must be bytes")
    key = pycompat.encodeutf8("cca.hg.%s" % key)
    sz = len(value)
    tmpl = b"set %s 0 0 %d\r\n%s\r\n"
    try:
        mcroutersocket.sendall(tmpl % (key, sz, value))
    except (socket.error, error.SignalInterrupt):
        mcroutersocket.connect(gethostport(ui))
        mcroutersocket.sendall(tmpl % (key, sz, value))
    data = []
    while True:
        char = mcroutersocket.recv(1)
        # No data was received, potentially due to a closed connection;
        # just return.
        if char == b"":
            return False
        if char not in b"\r\n":
            data.append(char)
        else:
            break
    return b"".join(data) == b"STORED"

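# A hypothetical read-side counterpart (not in the original source), sketched
# against the standard memcached text protocol that mcset speaks above: a hit
# comes back as "VALUE <key> <flags> <bytes>\r\n<data>\r\nEND\r\n", a miss as
# just "END\r\n". The mcroutersocket and gethostport names are reused from
# mcset under the assumption they are module-level helpers.
def mcget(key, ui):
    """Use local mcrouter to fetch a key from memcache; None on a miss."""
    if type(key) != str:
        raise ValueError("Key must be a string")
    key = pycompat.encodeutf8("cca.hg.%s" % key)
    req = b"get %s\r\n" % key
    try:
        mcroutersocket.sendall(req)
    except (socket.error, error.SignalInterrupt):
        mcroutersocket.connect(gethostport(ui))
        mcroutersocket.sendall(req)
    # read the header line ("VALUE <key> <flags> <bytes>" or "END")
    header = b""
    while not header.endswith(b"\r\n"):
        char = mcroutersocket.recv(1)
        if char == b"":
            return None
        header += char
    if not header.startswith(b"VALUE"):
        return None
    size = int(header.split()[3])
    # read <size> payload bytes plus the trailing "\r\nEND\r\n" (7 bytes)
    payload = b""
    while len(payload) < size + 7:
        chunk = mcroutersocket.recv(size + 7 - len(payload))
        if chunk == b"":
            return None
        payload += chunk
    return payload[:size]
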
def _writetracking(repo, tracking):
    with repo.wlock():
        data = ""
        for book, track in pycompat.iteritems(tracking):
            data += "%s %s\n" % (book, track)
        vfs = repo.sharedvfs
        vfs.write("bookmarks.tracking", pycompat.encodeutf8(data))

def _filename(cls, workspacename):
    # make a unique valid filename
    return (
        cls.prefix
        + "".join(x for x in workspacename if x.isalnum())
        + ".%s" % (hashlib.sha256(encodeutf8(workspacename)).hexdigest()[0:5])
    )

def tree_entry(fctx, blob_cache):
    """Compute a dulwich TreeEntry from a filectx.

    A side effect is the TreeEntry is stored in the passed cache.

    Returns a 2-tuple of (dulwich.objects.TreeEntry, dulwich.objects.Blob).
    """
    blob_id = blob_cache.get(fctx.filenode(), None)
    blob = None
    if blob_id is None:
        blob = dulobjs.Blob.from_string(fctx.data())
        blob_id = blob.id
        blob_cache[fctx.filenode()] = blob_id
    flags = fctx.flags()
    if "l" in flags:
        mode = 0o120000
    elif "x" in flags:
        mode = 0o100755
    else:
        mode = 0o100644
    return (
        dulobjs.TreeEntry(
            pycompat.encodeutf8(os.path.basename(fctx.path())), mode, blob_id
        ),
        blob,
    )

def sync(repo, *args, **kwargs):
    with backuplock.lock(repo):
        try:
            besteffort = kwargs.pop("besteffort", False)
            nc = util.nullcontextmanager
            with nc() if besteffort else repo.wlock(), nc() if besteffort else repo.lock():
                rc, synced = _sync(repo, *args, **kwargs)
                if synced is not None:
                    with repo.svfs(_syncstatusfile, "w+") as fp:
                        fp.write(encodeutf8("Success" if synced else "Failed"))
        except BaseException as e:
            with repo.svfs(_syncstatusfile, "w+") as fp:
                fp.write(encodeutf8("Exception:\n%s" % e))
            raise
        return rc

def setworkspace(repo, workspace):
    """Sets the currently connected workspace."""
    with repo.wlock(), repo.lock(), repo.svfs.open(
        filename, "wb", atomictemp=True
    ) as f:
        f.write(
            b"[commitcloud]\ncurrent_workspace=%s\n" % pycompat.encodeutf8(workspace)
        )

def __init__(self, repo, remotepath):
    self.repo = repo
    self.remotepath = remotepath
    repo.sharedvfs.makedirs("commitcloud")
    self.filename = os.path.join(
        self.prefix + hashlib.sha256(encodeutf8(remotepath)).hexdigest()[0:8]
    )
    self.heads = set()
    if repo.sharedvfs.exists(self.filename):
        lines = repo.sharedvfs.readutf8(self.filename).splitlines()
        if len(lines) < 2 or lines[0].strip() != FORMAT_VERSION:
            repo.ui.debug(
                "unrecognised backedupheads version '%s', ignoring\n"
                % lines[0].strip()
            )
            self.initfromserver()
            return
        if lines[1].strip() != remotepath:
            repo.ui.debug(
                "backupheads file is for a different remote ('%s' instead of '%s'), reinitializing\n"
                % (lines[1].strip(), remotepath)
            )
            self.initfromserver()
            return
        heads = (nodemod.bin(head.strip()) for head in lines[2:])
        hasnode = repo.changelog.hasnode
        self.heads = {h for h in heads if hasnode(h)}
    else:
        self.initfromserver()

def git_cleanup(ui, repo):
    """clean up Git commit map after history editing"""
    items = repo.githandler._map.items()
    if ui.configbool("hggit", "indexedlognodemap", False):
        dir = repo.sharedvfs.join(repo.githandler.map_file + "-log")
        tempdir = dir + ".temp"
        if os.path.exists(tempdir):
            hgutil.removedirs(tempdir)

        nodemap = nodemapmod.nodemap(tempdir)
        for gitsha, hgsha in items:
            if hgsha in repo:
                nodemap.add(gitsha, hgsha)
        nodemap.flush()

        with repo.wlock():
            tempdir2 = dir + ".temp2"
            hgutil.rename(dir, tempdir2)
            hgutil.rename(tempdir, dir)
            shutil.rmtree(tempdir2)

    new_map = []
    for gitsha, hgsha in items:
        if hgsha in repo:
            new_map.append(
                pycompat.encodeutf8("%s %s\n" % (hex(gitsha), hex(hgsha)))
            )
    wlock = repo.wlock()
    try:
        f = repo.sharedvfs(GitHandler.map_file, "wb")
        list(map(f.write, new_map))
    finally:
        wlock.release()
    ui.status(_("git commit map cleaned\n"))

def __init__(self, repo, remotepath, resetlocalstate=False, usehttp=False):
    self.repo = repo
    self.remotepath = remotepath
    self.usehttp = usehttp
    repo.sharedvfs.makedirs(self.directory)
    self.filename = os.path.join(
        self.directory,
        self.prefix + hashlib.sha256(encodeutf8(remotepath)).hexdigest()[0:8],
    )
    self.heads = set()
    if repo.sharedvfs.exists(self.filename) and not resetlocalstate:
        lines = repo.sharedvfs.readutf8(self.filename).splitlines()
        if len(lines) < 2 or lines[0].strip() != FORMAT_VERSION:
            repo.ui.debug(
                "unrecognised backedupheads version '%s', ignoring\n"
                % lines[0].strip()
            )
            self.initfromserver()
            return
        if lines[1].strip() != remotepath:
            repo.ui.debug(
                "backupheads file is for a different remote ('%s' instead of '%s'), reinitializing\n"
                % (lines[1].strip(), remotepath)
            )
            self.initfromserver()
            return
        heads = [nodemod.bin(head.strip()) for head in lines[2:]]
        heads = repo.changelog.filternodes(heads, local=True)
        self.heads = set(heads)
    else:
        self.initfromserver()

def encodebookmarks(bookmarks):
    encoded = {}
    for bookmark, node in pycompat.iteritems(bookmarks):
        encoded[bookmark] = node
    dumped = pycompat.encodeutf8(json.dumps(encoded))
    result = struct.pack(">i", len(dumped)) + dumped
    return result

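# A hypothetical decoder (not in the original source) for the framing that
# encodebookmarks produces: a 4-byte big-endian length prefix followed by the
# UTF-8 JSON payload. It returns the bookmark dict plus the offset just past
# the record, so several records can be read from one buffer.
def decodebookmarks(data, offset=0):
    (length,) = struct.unpack(">i", data[offset : offset + 4])
    payload = data[offset + 4 : offset + 4 + length]
    bookmarks = json.loads(pycompat.decodeutf8(payload))
    return bookmarks, offset + 4 + length
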
def _makenodes(data):
    nodes = {}
    for nodeinfo in data["nodes"]:
        node = ensurestr(nodeinfo["node"])
        parents = [encodeutf8(ensurestr(p)) for p in nodeinfo["parents"]]
        bookmarks = [ensurestr(b) for b in nodeinfo["bookmarks"]]
        author = ensurestr(nodeinfo["author"])
        date = int(nodeinfo["date"])
        message = ensurestr(nodeinfo["message"])
        phase = ensurestr(nodeinfo["phase"])
        if "remote_bookmarks" in nodeinfo:
            bookmarks.extend(
                "%s/%s" % (ensurestr(bm["remote"]), ensurestr(bm["name"]))
                for bm in nodeinfo["remote_bookmarks"] or []
            )
        nodes[encodeutf8(node)] = NodeInfo(
            node, bookmarks, parents, author, date, message, phase
        )
    return nodes

def writedistancecache(repo, distance):
    try:
        cachevfs = shareawarecachevfs(repo)
        f = cachevfs("distance", "w", atomictemp=True)
        for k, v in pycompat.iteritems(distance):
            f.write(pycompat.encodeutf8("%s %d %d\n" % (k, v[0], v[1])))
        # close the atomictemp file so it is renamed into place
        f.close()
    except (IOError, OSError):
        pass

def _request(self, request, flush=True):
    if self.connected:
        try:
            self.pipei.write(pycompat.encodeutf8(request))
            if flush:
                self.pipei.flush()
        except IOError:
            self.close()

def serialize(cls, status):
    ls = [list(status[i]) for i in range(7)]
    ll = []
    for s in ls:
        ll.append(
            [
                pycompat.decodeutf8(base64.b64encode(pycompat.encodeutf8(f)))
                for f in s
            ]
        )
    return super(buildstatusserializer, cls).serialize(ll)

def todict(self):
    files = {}
    files[b"deleted"] = {encodeutf8(d.path): d.todict() for d in self.deleted}
    files[b"unknown"] = {encodeutf8(u.path): u.todict() for u in self.unknown}
    files[b"localvfsfiles"] = {
        encodeutf8(f.path): f.todict() for f in self.localvfsfiles
    }
    return {
        b"files": files,
        b"version": encodeutf8(str(snapshotmetadata.VERSION)),
    }

def tocrlf(s, params, ui, **kwargs):
    """Filter to convert to CRLF EOLs."""
    if util.binary(s):
        return s
    if ui.configbool("eol", "only-consistent") and inconsistenteol(s):
        return s
    if ui.configbool("eol", "fix-trailing-newline") and s and s[-1:] != b"\n":
        s = s + b"\n"
    return pycompat.encodeutf8(util.tocrlf(pycompat.decodeutf8(s)))

def _createbundler(ui, repo, other):
    bundler = bundle2.bundle20(ui, bundle2.bundle2caps(other))
    compress = ui.config("infinitepush", "bundlecompression", "UN")
    bundler.setcompression(compress)
    # Disallow pushback because we want to avoid taking repo locks.
    # And we don't need pushback anyway.
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, allowpushback=False))
    bundler.newpart("replycaps", data=pycompat.encodeutf8(capsblob))
    return bundler

def deserialize(cls, string):
    ll = super(buildstatusserializer, cls).deserialize(string)
    ls = []
    for l in ll:
        ls.append(
            [
                pycompat.decodeutf8(base64.b64decode(pycompat.encodeutf8(f)))
                for f in l
            ]
        )
    return status(*ls)

def _writelocalbackupstate(repo, remotepath, heads, bookmarks):
    state = {
        "heads": list(heads),
        "bookmarks": bookmarks,
        "remotepath": remotepath,
    }
    with repo.sharedvfs(
        _localbackupstatepath(remotepath), "w", atomictemp=True
    ) as f:
        f.write(pycompat.encodeutf8(json.dumps(state)))

def _isutf8(ui, name):
    if not util.isvalidutf8(name):
        # We don't support non-utf8 file names, so just ignore it.
        # Passing it along to the rest of Mercurial can cause issues
        # since the Python-to-Rust boundary doesn't support
        # surrogate escaped strings.
        name = pycompat.decodeutf8(pycompat.encodeutf8(name, errors="replace"))
        ui.warn(_("skipping invalid utf-8 filename: '%s'\n") % name)
        return False
    return True

def _handlecommandexception(orig, ui):
    warning = dispatch._exceptionwarning(ui)
    if ui.configbool("errorredirect", "fancy-traceback"):
        trace = util.smartformatexc()
    else:
        trace = traceback.format_exc()

    # let blackbox log it (if it is configured to do so)
    ui.log("command_exception", "%s\n%s\n", warning, trace)
    exctype = sys.exc_info()[0]
    exctypename = "None" if exctype is None else exctype.__name__
    ui.log(
        "hgerrors",
        "exception has occurred: %s",
        warning,
        exception_type=exctypename,
        traceback=trace,
    )

    script = ui.config("errorredirect", "script")
    if not script:
        return orig(ui)

    # run the external script
    env = encoding.environ.copy()
    env["WARNING"] = warning
    env["TRACE"] = trace

    # decide whether to use shell smartly, see 9335dc6b2a9c in hg
    shell = any(c in script for c in "|&;<>()$`\\\"' \t\n*?[#~=%")

    try:
        p = subprocess.Popen(script, shell=shell, stdin=subprocess.PIPE, env=env)
        p.communicate(pycompat.encodeutf8(trace))
    except Exception:
        # The binary cannot be executed, or some other issues. For example,
        # "script" is not in PATH, and shell is False; or the peer closes the
        # pipe early. Fall back to the plain error reporting.
        return _printtrace(ui, warning)
    else:
        ret = p.returncode
        # Python returns a negative exit code for a signal-terminated process.
        # The shell converts a signal-terminated process to a positive exit
        # code by adding 128. Ctrl+C generates SIGINT. Re-report the error
        # unless the process exits cleanly or is terminated by SIGINT (Ctrl+C).
        ctrlc = (ret == signal.SIGINT + 128) or (ret == -signal.SIGINT)
        if ret != 0 and not ctrlc:
            return _printtrace(ui, warning)

    return True  # do not re-raise