Example #1
def _parseresponse(payload):
    result = {}
    i = 0
    l = len(payload) - 1
    state = 0  # 0: vfspath, 1: size
    vfspath = size = b""
    while i < l:
        ch = payload[i:i + 1]
        if ch == b"\0":
            if state == 1:
                sizeint = int(pycompat.decodeutf8(size))
                buf = buffer(payload)[i + 1:i + 1 + sizeint]
                result[pycompat.decodeutf8(vfspath)] = buf
                i += sizeint
                state = 0
                vfspath = size = b""
            elif state == 0:
                state = 1
        else:
            if state == 1:
                size += ch
            elif state == 0:
                vfspath += ch
        i += 1
    return result
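The parser above implies a simple wire layout: each entry is a NUL-terminated vfspath, a NUL-terminated decimal size, and then exactly that many raw data bytes. As a minimal sketch (plain Python, with a hypothetical helper name that is not part of the original module), the corresponding payload could be built like this:

def _buildresponse_sketch(entries):
    # entries: {str vfspath: bytes data}; layout assumed from _parseresponse:
    # vfspath, NUL, decimal size, NUL, then exactly `size` raw bytes, repeated.
    payload = b""
    for vfspath, data in entries.items():
        payload += vfspath.encode("utf-8") + b"\0"
        payload += str(len(data)).encode("utf-8") + b"\0"
        payload += data
    return payload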
Example #2
def _execute(ui, repo, target=None):
    script = ui.config("stablerev", "script")
    if script is None:
        raise error.ConfigError(_("must set stablerev.script"))

    # Pass '--target $TARGET' for compatibility.
    # XXX: Remove this once the new code has been rolled out for some time.
    if target is not None:
        script += " --target %s" % util.shellquote(target)
    try:
        ui.debug("repo-specific script for stable: %s\n" % script)
        reporoot = repo.wvfs.join("")
        env = encoding.environ.copy()
        env.update({"REAL_CWD": pycompat.getcwd(), "HG_ROOT": reporoot})
        if target is not None:
            env["TARGET"] = target
        ui.debug("setting current working directory to: %s\n" % reporoot)
        p = subprocess.Popen(
            script,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            close_fds=util.closefds,
            cwd=reporoot,
            env=env,
        )
        res = p.communicate()
        ui.debug("stable script returns: %r\n" % (res, ))
        return (pycompat.decodeutf8(res[0]), pycompat.decodeutf8(res[1]))
    except subprocess.CalledProcessError as e:
        raise error.Abort(_("couldn't fetch stable rev: %s") % e)
Example #3
    def getchangedfiles(self, version, i):
        changes = []
        if i is None:
            output, status = self.gitrunlines(
                "diff-tree", "--root", "-m", "-r", version
            )
            if status:
                raise error.Abort(_("cannot read changes in %s") % version)
            for l in output:
                if b"\t" not in l:
                    continue
                m, f = l[:-1].split(b"\t")
                changes.append(decodeutf8(f))
        else:
            output, status = self.gitrunlines(
                "diff-tree",
                "--name-only",
                "--root",
                "-r",
                version,
                "%s^%s" % (version, i + 1),
                "--",
            )
            if status:
                raise error.Abort(_("cannot read changes in %s") % version)
            changes = [decodeutf8(f).rstrip("\n") for f in output]

        return changes
Example #4
    def _run(
        self,
        cmd,
        cwd=None,
        env=None,
        stderr=False,
        input=None,
        timeout=0,
        returncode=False,
    ):
        """Adapted from fbcode/scm/lib/_repo.py:Repository::run"""
        cmd = [util.hgexecutable(), "-R", self.repo.origroot] + cmd
        stdin = None
        if input:
            stdin = subprocess.PIPE
        p = self._spawn(
            cmd,
            cwd=cwd,
            env=env,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            stdin=stdin,
            timeout=timeout,
        )
        if input:
            if not isinstance(input, bytes):
                input = input.encode("utf-8")
            out, err = p.communicate(input=input)
        else:
            out, err = p.communicate()

        out = decodeutf8(out)
        err = decodeutf8(err)

        if p.returncode != 0 and returncode is False:
            self.ui.warn(_("run call failed!\n"))
            # Sometimes git or hg error output can be very big.
            # Let's limit stderr and stdout to 1000 characters.
            OUTPUT_LIMIT = 1000
            out = out[:OUTPUT_LIMIT]
            err = err[:OUTPUT_LIMIT]
            out = "STDOUT: %s\nSTDERR: %s\n" % (out, err)
            cmdstr = " ".join(
                [self._safe_bytes_to_str(entry) for entry in cmd])
            cmdstr += "\n%s" % out
            ex = subprocess.CalledProcessError(p.returncode, cmdstr)
            ex.output = out
            raise ex

        if out and self.printout:
            self.ui.warn(_("stdout: %s\n") % out)
        if err and self.printout:
            self.ui.warn(_("stderr: %s\n") % err)

        if returncode:
            return out, err, p.returncode

        if stderr:
            return out, err, None
        return out, "", None
Example #5
    def getbookmarks(self):
        bookmarks = {}

        # Handle local and remote branches
        remoteprefix = self.ui.config("convert", "git.remoteprefix")
        reftypes = [
            # (git prefix, hg prefix)
            ("refs/remotes/origin/", remoteprefix + "/"),
            ("refs/heads/", ""),
        ]

        exclude = {"refs/remotes/origin/HEAD"}

        try:
            output, status = self.gitrunlines("show-ref")
            for line in output:
                line = line.strip()
                rev, name = line.split(None, 1)
                rev = decodeutf8(rev)
                name = decodeutf8(name)
                # Process each type of branch
                for gitprefix, hgprefix in reftypes:
                    if not name.startswith(gitprefix) or name in exclude:
                        continue
                    name = "%s%s" % (hgprefix, name[len(gitprefix) :])
                    bookmarks[name] = rev
        except Exception:
            pass

        return bookmarks
Example #6
 def fromdict(cls, metadatadict):
     # check version of metadata
     try:
         version = int(metadatadict.get(b"version"))
     except (ValueError, TypeError):
         raise error.Abort("invalid metadata version: %s\n" %
                           (metadatadict.get(b"version"), ))
     if version != snapshotmetadata.VERSION:
         raise error.Abort("invalid version number %d" % (version, ))
     try:
         files = metadatadict[b"files"]
         deleted = [
             filewrapper(decodeutf8(path))
             for path in sorted(files[b"deleted"].keys())
         ]
         unknown = [
             filewrapper.fromdict(decodeutf8(path), data)
             for path, data in sorted(files[b"unknown"].items())
         ]
         localvfsfiles = [
             filewrapper.fromdict(decodeutf8(path), data)
             for path, data in sorted(files[b"localvfsfiles"].items())
         ]
         return cls(deleted=deleted,
                    unknown=unknown,
                    localvfsfiles=localvfsfiles)
     except ValueError:
         raise error.Abort("invalid metadata: %s\n" % (metadatadict, ))
Example #7
    def bundle2getgithgmap(op, part):
        params = dict(part.mandatoryparams)
        if _validatepartparams(op, params):
            filename = params["filename"]
            with op.repo.wlock():
                data = _githgmappayload.fromjson(pycompat.decodeutf8(part.read()))
                missinglines = data.missinglines

                # No need to update anything if already in sync.
                if not missinglines:
                    return

                if data.needfullsync:
                    newlines = missinglines
                else:
                    mapfile = _getfile(op.repo, filename)
                    if mapfile:
                        currentlines = set(
                            pycompat.decodeutf8(l) for l in mapfile.readlines()
                        )
                        if currentlines & missinglines:
                            msg = "warning: gitmeta: unexpected lines in .hg/%s\n"
                            op.repo.ui.warn(_(msg) % filename)

                        currentlines.update(missinglines)
                        newlines = currentlines
                    else:
                        raise error.Abort(
                            _("gitmeta: could not read from .hg/%s") % filename
                        )

                _writefile(op, filename, pycompat.encodeutf8("".join(newlines)))
                _writefile(
                    op, hgheadsfile, pycompat.encodeutf8("\n".join(data.newheads))
                )
Example #8
 def deserialize(cls, text: bytes) -> str:
     try:
         decoded = pycompat.decodeutf8(text)
         return cls(l.split(" ", 1) for l in decoded.splitlines()).validate()
     except ValueError:  # l.split returns 1 item instead of 2
         raise InvalidPointer(
             _("cannot parse git-lfs text: %r")
             % pycompat.decodeutf8(text, errors="replace")
         )
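The text being deserialized here is a git-lfs pointer: newline-separated lines, each a key and a value split on the first space. A hedged illustration of that shape (the oid below is a zero-filled placeholder, not real data):

pointer = (
    b"version https://git-lfs.github.com/spec/v1\n"
    b"oid sha256:0000000000000000000000000000000000000000000000000000000000000000\n"
    b"size 12345\n"
)
pairs = dict(l.split(" ", 1) for l in pointer.decode("utf-8").splitlines())
# pairs == {"version": "https://git-lfs.github.com/spec/v1",
#           "oid": "sha256:000...000", "size": "12345"}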
Example #9
 def addtree(tree, dirname):
     for entry in pycompat.iteritems(tree):
         if entry.mode & 0o40000:
             # expand directory
             subtree = self.repo.handler.git.get_object(entry.sha)
             addtree(subtree, dirname + pycompat.decodeutf8(entry.path) + "/")
         else:
             path = dirname + pycompat.decodeutf8(entry.path)
             self._map[path] = bin(entry.sha)
             self._flags[path] = hgflag(entry.mode)
Example #10
def shcmd(cmd, input=None, check=True, keeperr=True):
    _, _, _, p = util.popen4(cmd)
    out, err = p.communicate(input)
    out = pycompat.decodeutf8(out, errors="replace")
    err = pycompat.decodeutf8(err, errors="replace")
    if check and p.returncode:
        raise error.Abort(cmd + " error: " + err)
    elif keeperr:
        out += err
    return out
Example #11
    def getcommit(self, version):
        c = self.catfile(version, "commit")  # read the raw commit object
        end = c.find(b"\n\n")
        message = c[end + 2 :]
        message = decodeutf8(self.recode(message))
        l = c[:end].splitlines()
        parents = []
        author = committer = None
        extra = {}
        for e in l[1:]:
            n, v = e.split(b" ", 1)
            if n == b"author":
                p = v.split()
                tm, tz = p[-2:]
                author = b" ".join(p[:-2])
                if author[0:1] == b"<":
                    author = author[1:-1]
                author = decodeutf8(self.recode(author))
            if n == b"committer":
                p = v.split()
                tm, tz = p[-2:]
                committer = b" ".join(p[:-2])
                if committer[0:1] == b"<":
                    committer = committer[1:-1]
                committer = decodeutf8(self.recode(committer))
            if n == b"parent":
                parents.append(decodeutf8(v))
            if decodeutf8(n) in self.copyextrakeys:
                extra[decodeutf8(n)] = decodeutf8(v)

        if self.committeractions["dropcommitter"]:
            committer = None
        elif self.committeractions["replaceauthor"]:
            author = committer

        if committer:
            messagealways = self.committeractions["messagealways"]
            messagedifferent = self.committeractions["messagedifferent"]
            if messagealways:
                message += "\n%s %s\n" % (messagealways, committer)
            elif messagedifferent and author != committer:
                message += "\n%s %s\n" % (messagedifferent, committer)

        tm = decodeutf8(tm)
        tz = decodeutf8(tz)
        tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
        tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
        date = tm + " " + str(tz)
        saverev = self.ui.configbool("convert", "git.saverev")

        c = common.commit(
            parents=parents,
            date=date,
            author=author,
            desc=message,
            rev=version,
            extra=extra,
            saverev=saverev,
        )
        return c
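For reference, getcommit is parsing a raw git commit object (header lines, a blank line, then the message). A made-up, truncated example of that shape; the hashes and identities are placeholders:

raw = (
    b"tree 0000000000000000000000000000000000000000\n"
    b"parent 1111111111111111111111111111111111111111\n"
    b"author Jane Doe <jane@example.com> 1600000000 -0700\n"
    b"committer Jane Doe <jane@example.com> 1600000000 -0700\n"
    b"\n"
    b"commit message\n"
)
# c.find(b"\n\n") splits headers from the message; the last two tokens of the
# author/committer lines are the timestamp and the timezone offset.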
Example #12
def debugverifylinkrevcache(ui, repo, *pats, **opts):
    """read the linkrevs from the database and verify if they are correct"""
    # restore to the original _adjustlinkrev implementation
    c = context.basefilectx
    extensions.unwrapfunction(c, "_adjustlinkrev", _adjustlinkrev)

    paths = {}  # {id: name}
    nodes = {}  # {id: name}

    repo = repo.unfiltered()
    idx = repo.unfiltered().changelog.index

    db = repo._linkrevcache
    paths = dict(db._getdb(db._pathdbname))
    nodes = dict(db._getdb(db._nodedbname))
    pathsrev = dict(
        (v, pycompat.decodeutf8(k)) for k, v in pycompat.iteritems(paths))
    nodesrev = dict((v, k) for k, v in pycompat.iteritems(nodes))
    lrevs = dict(db._getdb(db._linkrevdbname))

    readfilelog = ui.configbool("linkrevcache", "readfilelog", True)

    total = len(lrevs)
    with progress.bar(ui, _("verifying"), total=total) as prog:
        for i, (k, v) in enumerate(pycompat.iteritems(lrevs)):
            prog.value = i
            pathid, nodeid = k.split(b"\0")
            path = pathsrev[pathid]
            fnode = nodesrev[nodeid]
            linkrevs = _str2intlist(pycompat.decodeutf8(v))
            linkrevs.sort()

            for linkrev in linkrevs:
                fctx = repo[linkrev][path]
                introrev = fctx.introrev()
                fctx.linkrev()
                if readfilelog:
                    flinkrev = fctx.linkrev()
                else:
                    flinkrev = None
                if introrev == linkrev:
                    continue
                if introrev in idx.commonancestorsheads(
                        introrev, linkrev) and (introrev in linkrevs
                                                or introrev == flinkrev):
                    adjective = _("unnecessary")
                else:
                    adjective = _("incorrect")
                ui.warn(
                    _("%s linkrev %s for %s @ %s (expected: %s)\n") %
                    (adjective, linkrev, path, node.hex(fnode), introrev))

    ui.write(_("%d entries verified\n") % total)
Example #13
 def checkexit(self, status, output=""):
     if status:
         if output:
             self.ui.warn(_("%s error:\n") % self.command)
             self.ui.warn(pycompat.decodeutf8(output, errors="replace"))
         msg = util.explainexit(status)[0]
         raise error.Abort("%s %s" % (self.command, msg))
Example #14
def getconfigs(wctx):
    """returns {name: [path]}.
    [path] under a same name are synced. name is not useful.
    """
    # read from .hgdirsync in repo
    filename = ".hgdirsync"
    try:
        content = pycompat.decodeutf8(wctx[filename].data())
    except (error.ManifestLookupError, IOError, AttributeError, KeyError):
        content = ""
    cfg = config.config()
    if content:
        cfg.parse(filename, "[dirsync]\n%s" % content, ["dirsync"])

    maps = util.sortdict()
    repo = wctx.repo()
    for key, value in repo.ui.configitems("dirsync") + cfg.items("dirsync"):
        if "." not in key:
            continue
        name, disambig = key.split(".", 1)
        # Normalize paths to have / at the end. For easy concatenation later.
        if value[-1] != "/":
            value = value + "/"
        if name not in maps:
            maps[name] = []
        maps[name].append(value)
    return maps
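As an illustration of the grouping getconfigs performs, keys that share the part before the first dot end up under one name, and each path is normalized to end with "/". A self-contained restatement with hypothetical entries:

# Hypothetical [dirsync] items; names and paths are placeholders.
items = [("projectx.dir1", "fbcode/projectx"), ("projectx.dir2", "fbandroid/projectx/")]
maps = {}
for key, value in items:
    if "." not in key:
        continue
    name, _disambig = key.split(".", 1)
    if not value.endswith("/"):
        value += "/"
    maps.setdefault(name, []).append(value)
# maps == {"projectx": ["fbcode/projectx/", "fbandroid/projectx/"]}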
Example #15
def debugshell(ui, repo, **opts):
    command = opts.get("command")

    _assignobjects(locals(), repo)

    if command:
        exec(command)
        return 0

    if not ui.interactive():
        command = decodeutf8(ui.fin.read())
        exec(command)
        return 0

    bannermsg = "loaded repo:  %s\n" "using source: %s" % (
        repo and repo.root or "(none)",
        mercurial.__path__[0],
    ) + ("\n\nAvailable variables:\n"
         " e:  edenscm\n"
         " n:  edenscmnative\n"
         " m:  edenscm.mercurial\n"
         " x:  edenscm.hgext\n"
         " b:  bindings\n"
         " ui: the ui object")
    if repo:
        bannermsg += (
            "\n repo: the repo object\n cl: repo.changelog\n mf: repo.manifestlog"
        )

    import IPython

    IPython.embed(header=bannermsg)
Example #16
    def _open(self, vfs, filename="namejournal", _newestfirst=True):
        if not vfs.exists(filename):
            return

        with vfs(filename) as f:
            raw = f.read()

        lines = raw.split(b"\0")
        version = lines and pycompat.decodeutf8(lines[0])
        if version != str(storageversion):
            version = version or _("not available")
            raise error.Abort(_("unknown journal file version '%s'") % version)

        # Skip the first line, it's a version number. Normally we iterate over
        # these in reverse order to list newest first; only when copying across
        # a shared storage do we forgo reversing.
        lines = lines[1:]
        if _newestfirst:
            lines = reversed(lines)
        for line in lines:
            if not line:
                continue
            try:
                yield journalentry.fromstorage(line)
            except ValueError as ex:
                self.ui.debug("skipping corrupt journalentry: %s" % ex)
                # If a journal entry is corrupt, just skip it.
                pass
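The file read above is a flat byte string: the storage version first, then each journal entry, all separated by NUL bytes. A minimal sketch of that layout (the version value and the entries below are placeholders):

storageversion = 0  # placeholder; the real constant is defined in the journal module
entries = [b"entry-one", b"entry-two"]
raw = b"\0".join([str(storageversion).encode("utf-8")] + entries)
# raw.split(b"\0")[0] carries the version; the remaining fields are entries,
# normally iterated in reverse so the newest one comes first.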
Example #17
    def get_unseen_commits(todo):
        """get all unseen commits reachable from todo in topological order

        'unseen' means not reachable from the done set and not in the git map.
        Mutates todo and the done set in the process."""
        commits = []
        while todo:
            sha = todo[-1]
            if sha in done or git_map.lookupbyfirst(bin(sha)) is not None:
                todo.pop()
                continue
            if sha in commit_cache:
                obj = commit_cache[sha]
            else:
                obj = git_object_store[pycompat.encodeutf8(sha)]
                commit_cache[sha] = obj
            assert isinstance(obj, Commit)
            for p in obj.parents:
                p = pycompat.decodeutf8(p)
                if p not in done and git_map.lookupbyfirst(bin(p)) is None:
                    todo.append(p)
                    # process parents of a commit before processing the
                    # commit itself, and come back to this commit later
                    break
            else:
                commits.append(sha)
                done.add(sha)
                todo.pop()

        return commits
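The loop above is an iterative post-order walk: a commit's unseen parents are pushed (and the for loop breaks) before the commit itself is emitted by the else branch. A self-contained toy of the same pattern with a made-up parent map:

parents = {"c": ["b"], "b": ["a"], "a": []}
todo, done, order = ["c"], set(), []
while todo:
    sha = todo[-1]
    if sha in done:
        todo.pop()
        continue
    for p in parents[sha]:
        if p not in done:
            todo.append(p)
            break
    else:
        order.append(sha)  # all parents handled, emit this commit
        done.add(sha)
        todo.pop()
# order == ["a", "b", "c"]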
Example #18
def checksshcommand(ui, url, opts):
    rui = hg.remoteui(ui, opts)
    sshcmd = rui.config("ui", "ssh")
    sshaddenv = dict(rui.configitems("sshenv"))
    sshenv = util.shellenviron(sshaddenv)
    args = util.sshargs(sshcmd, url.host, url.user, url.port)
    cmd = "%s %s %s" % (sshcmd, args, "hostname")
    ui.status(
        _("Testing SSH connection to the server: running 'hostname'\n"),
        component="debugnetwork",
    )
    ui.pushbuffer(subproc=True)
    starttime = util.timer()
    res = ui.system(cmd, blockedtag="debugnetwork", environ=sshenv)
    endtime = util.timer()
    hostname = pycompat.decodeutf8(ui.popbufferbytes()).strip()
    if res == 0:
        ui.status(
            _("Connected ok: %s\n") % util.timecount(endtime - starttime),
            component="debugnetwork",
        )
        ui.status(_("Server hostname is %s\n") % hostname,
                  component="debugnetwork")
        return True
    else:
        ui.status(_("Failed to connect: ssh returned %s\n") % res,
                  error=_("error"))
        return False
Example #19
    def write(self, annotatedresult, lines=None, existinglines=None):
        if annotatedresult:
            self._writecomma()

        pieces = [(name, list(map(f, annotatedresult)))
                  for f, sep, name, enc in self.funcmap]
        if lines is not None:
            pieces.append(
                ("line",
                 list(pycompat.decodeutf8(l, errors="replace")
                      for l in lines)))
        pieces.sort()

        seps = [","] * len(pieces[:-1]) + [""]

        result = ""
        lasti = len(annotatedresult) - 1
        for i in range(len(annotatedresult)):
            result += "\n {\n"
            for j, p in enumerate(pieces):
                k, vs = p
                result += '  "%s": %s%s\n' % (
                    k,
                    templatefilters.json(vs[i], paranoid=False),
                    seps[j],
                )
            result += " }%s" % ("" if i == lasti else ",")
        if lasti >= 0:
            self.needcomma = True

        self.ui.write(result)
Example #20
 def _load(self):
     """load state from file"""
     if not self.path:
         return
     # use local variables in a loop. CPython uses LOAD_FAST for them,
     # which is faster than both LOAD_CONST and LOAD_GLOBAL.
     flaglen = 1
     hshlen = _hshlen
     with open(self.path, "rb") as f:
         if f.read(len(self.HEADER)) != self.HEADER:
             raise error.CorruptedFileError()
         self.clear(flush=False)
         while True:
             buf = f.read(flaglen)
             if not buf:
                 break
             flag = ord(buf)
             rev = len(self._rev2hsh)
             if flag & renameflag:
                 path = self._readcstr(f)
                 self._renamerevs.append(rev)
                 self._renamepaths.append(pycompat.decodeutf8(path))
             hsh = f.read(hshlen)
             if len(hsh) != hshlen:
                 raise error.CorruptedFileError()
             self._hsh2rev[hsh] = rev
             self._rev2flag.append(flag)
             self._rev2hsh.append(hsh)
     self._lastmaxrev = self.maxrev
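Reading _load, each record appears to be a flag byte, an optional NUL-terminated rename path (when the rename flag is set), and then a fixed-length binary hash. A heavily hedged sketch of one such record, assuming a 20-byte hash and treating renameflag as a single flag bit; every value here is a placeholder:

renameflag = 1          # assumed flag bit
hshlen = 20             # assumed hash length (_hshlen in the real module)
flag = renameflag       # this record carries a rename
record = bytes([flag]) + b"old/name" + b"\0" + b"\x00" * hshlen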
Example #21
def _getmissinglines(mapfile, missinghashes):
    missinglines = set()

    # Avoid expensive lookup through the map file if there is no missing hash.
    if not missinghashes:
        return missinglines

    linelen = 82
    hashestofind = missinghashes.copy()
    content = pycompat.decodeutf8(mapfile.read())
    if len(content) % linelen != 0:
        raise error.Abort(_("gitmeta: invalid mapfile length (%s)") % len(content))

    # Walk backwards through the map file, since recent commits are added at the
    # end.
    count = int(len(content) / linelen)
    for i in range(count - 1, -1, -1):
        offset = i * linelen
        line = content[offset : offset + linelen]
        hgsha = line[41:81]
        if hgsha in hashestofind:
            missinglines.add(line)

            # Return the missing lines if we found all of them.
            hashestofind.remove(hgsha)
            if not hashestofind:
                return missinglines

    raise error.Abort(_("gitmeta: missing hashes in file %s") % mapfile.name)
Example #22
 def _checkoutlinelogwithedits(self):
     """() -> [str]. prompt all lines for edit"""
     alllines = self.linelog.getalllines()
     # header
     editortext = (
         _(
             'HG: editing %s\nHG: "y" means the line to the right '
             "exists in the changeset to the top\nHG:\n"
         )
         % self.fctxs[-1].path()
     )
     # [(idx, fctx)]. hide the dummy emptyfilecontext
     visiblefctxs = [
         (i, f)
         for i, f in enumerate(self.fctxs)
         if not isinstance(f, emptyfilecontext)
     ]
     for i, (j, f) in enumerate(visiblefctxs):
         editortext += _("HG: %s/%s %s %s\n") % (
             "|" * i,
             "-" * (len(visiblefctxs) - i + 1),
             node.short(f.node()),
             f.description().split("\n", 1)[0],
         )
     editortext += _("HG: %s\n") % ("|" * len(visiblefctxs))
     # figure out the lifetime of a line, this is relatively inefficient,
     # but probably fine
     lineset = defaultdict(lambda: set())  # {(llrev, linenum): {llrev}}
     for i, f in visiblefctxs:
         self.linelog.annotate((i + 1) * 2)
         for l in self.linelog.annotateresult:
             lineset[l].add(i)
     # append lines
     for l in alllines:
         editortext += "    %s : %s" % (
             "".join([("y" if i in lineset[l] else " ") for i, _f in visiblefctxs]),
             decodeutf8(self._getline(l)),
         )
     # run editor
     editedtext = self.ui.edit(editortext, "", action="absorb")
     if not editedtext:
         raise error.Abort(_("empty editor text"))
     # parse edited result
     contents = [b"" for i in self.fctxs]
     leftpadpos = 4
     colonpos = leftpadpos + len(visiblefctxs) + 1
     for l in editedtext.splitlines(True):
         if l.startswith("HG:"):
             continue
         if l[colonpos - 1 : colonpos + 2] != " : ":
             raise error.Abort(_("malformed line: %s") % l)
         linecontent = encodeutf8(l[colonpos + 2 :])
         for i, ch in enumerate(l[leftpadpos : colonpos - 1]):
             if ch == "y":
                 contents[visiblefctxs[i][0]] += linecontent
     # chunkstats is hard to calculate if anything changes, therefore
     # set them to just a simple value (1, 1).
     if editedtext != editortext:
         self.chunkstats = [1, 1]
     return contents
Example #23
 def test_merge_poem(self):
     """Test case from diff3 manual"""
     m3 = Merge3(TZU, LAO, TAO)
     ml = list(m3.merge_lines(b"LAO", b"TAO"))
     self.log("merge result:")
     self.log(decodeutf8(b"".join(ml)))
     self.assertEqual(ml, MERGED_RESULT)
Example #24
 def getheads(self):
     if not self.revs:
         output, status = self.gitrun("rev-parse", "--branches", "--remotes")
         output = decodeutf8(output)
         heads = output.splitlines()
         if status:
             raise error.Abort(_("cannot retrieve git heads"))
     else:
         heads = []
         for rev in self.revs:
             rawhead, ret = self.gitrun("rev-parse", "--verify", rev)
             rawhead = decodeutf8(rawhead)
             heads.append(rawhead[:-1])
             if ret:
                 raise error.Abort(_('cannot retrieve git head "%s"') % rev)
     return heads
Example #25
def cloudstatus(ui, repo, **opts):
    """Shows information about the state of the user's workspace"""

    workspacename = workspace.currentworkspace(repo)
    if workspacename is None:
        ui.write(_("You are not connected to any workspace\n"))
        return
    ui.write(_("Workspace: %s\n") % workspacename)

    autosync = "ON" if background.autobackupenabled(repo) else "OFF"
    ui.write(_("Automatic Sync: %s\n") % autosync)

    state = syncstate.SyncState(repo, workspacename)

    ui.write(_("Last Sync Version: %s\n") % state.version)
    if state.maxage is not None:
        ui.write(_("Last Sync Maximum Commit Age: %s days\n") % state.maxage)
    ui.write(
        _("Last Sync Heads: %d (%d omitted)\n") %
        (len(state.heads), len(state.omittedheads)))
    ui.write(
        _("Last Sync Bookmarks: %d (%d omitted)\n") %
        (len(state.bookmarks), len(state.omittedbookmarks)))
    ui.write(
        _("Last Sync Remote Bookmarks: %d\n") % (len(state.remotebookmarks)))
    ui.write(_("Last Sync Snapshots: %d\n") % (len(state.snapshots)))

    ui.write(_("Last Sync Time: %s\n") % time.ctime(state.lastupdatetime))

    if repo.svfs.isfile(sync._syncstatusfile):
        status = pycompat.decodeutf8(repo.svfs.read(sync._syncstatusfile))
    else:
        status = "Not logged"
    ui.write(_("Last Sync Status: %s\n") % status)
Example #26
def _peersetup(ui, peer):
    if peer.capable("clienttelemetry"):
        logargs = clienttelemetryvaluesfromconfig(ui)
        logargs.update(
            {name: f(ui)
             for name, f in _clienttelemetryfuncs.items()})
        logargs.update(_clienttelemetrydata)
        response = decodeutf8(peer._call("clienttelemetry", **logargs))
        responseitems = response.split()
        peername = responseitems[0] if responseitems else ""
        peer._realhostname = peername
        peerinfo = {}
        for index in range(1, len(responseitems) - 1, 2):
            peerinfo[responseitems[index]] = responseitems[index + 1]
        peer._peerinfo = peerinfo
        blackbox.log(
            {"clienttelemetry": {
                "peername": peername,
                "peerinfo": peerinfo
            }})
        util.info("client-telemetry", peername=peername, **peerinfo)
        ann = ui.configbool("clienttelemetry", "announceremotehostname", None)
        if ann is None:
            ann = not ui.plain() and ui._isatty(ui.ferr)
        if ann and not ui.quiet:
            ui.write_err(_("connected to %s\n") % response)
            perftrace.tracevalue("Server", peername)
            for item, value in peerinfo.items():
                perftrace.tracevalue(f"Server {item}", value)
Example #27
def _parsepackmeta(metabuf):
    # type: (bytes) -> Dict[str, bytes]
    """parse datapack meta, bytes (<metadata-list>) -> dict

    The dict contains raw content - both keys and values are strings.
    Upper-level business may want to convert some of them to other types like
    integers, on their own.

    raise ValueError if the data is corrupted
    """
    metadict = {}
    offset = 0
    buflen = len(metabuf)
    while buflen - offset >= 3:
        key = pycompat.decodeutf8(struct.unpack_from("!c", metabuf, offset)[0])
        offset += 1
        metalen = struct.unpack_from("!H", metabuf, offset)[0]
        offset += 2
        if offset + metalen > buflen:
            raise ValueError("corrupted metadata: incomplete buffer")
        value = metabuf[offset:offset + metalen]
        metadict[key] = value
        offset += metalen
    if offset != buflen:
        raise ValueError("corrupted metadata: redundant data")
    return metadict
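A hypothetical inverse of _parsepackmeta, sketching how a buffer in this layout could be built (single-character key, 2-byte big-endian length, raw value). This is not the module's own serializer, just an illustration of the format the parser checks:

import struct

def _buildpackmeta_sketch(metadict):
    # metadict: {single-character str key: bytes value}
    metabuf = b""
    for key, value in sorted(metadict.items()):
        assert len(key) == 1, "keys are single characters"
        assert len(value) <= 0xFFFF, "value length must fit in two bytes"
        metabuf += struct.pack("!c", key.encode("utf-8"))
        metabuf += struct.pack("!H", len(value))
        metabuf += value
    return metabuf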
Example #28
def sendunbundlereplaybatch(ui, **opts):
    """Send a batch of unbundlereplay wireproto commands to a given server

    This exists to amortize the costs of `hg.peer` creation over multiple
    `unbundlereplay` calls.

    Reads `(bundlefile, timestampsfile, ontobook, rebasedhead)` from
    stdin. See docs of `sendunbundlereplay` for more details.

    Takes the `reports` argument on the command line. After each unbundlereplay
    command is successfully executed, will write and flush a single line
    into this file, thus reporting progress. File is truncated at the beginning
    of this function.

    ``sendunbundlereplay.respondlightly`` config option instructs the server
    to avoid sending large bundle2 parts back.
    """
    if not opts.get("reports"):
        raise error.Abort("--reports argument is required")
    path = opts["path"]
    returncode = 0
    remote = getremote(ui, path)
    ui.debug("using %s as a reports file\n" % opts["reports"])
    with open(opts["reports"], "wb", 0) as reportsfile:
        counter = 0
        while True:
            line = sys.stdin.readline()
            if line == "":
                break

            # The newest sync job sends 5 parameters, but older versions send 4.
            # We default the last parameter to None for compatibility.
            parts = line.split()
            if len(parts) == 4:
                parts.append(None)
            (bfname, tsfname, ontobook, rebasedhead, logfile) = parts
            ontobook = decodeutf8(base64.b64decode(ontobook))

            rebasedhead = None if rebasedhead == "DELETED" else rebasedhead
            commitdates = getcommitdates(ui, tsfname)
            stream = getstream(bfname)

            with capturelogs(ui, remote, logfile):
                returncode = runreplay(
                    ui, remote, stream, commitdates, rebasedhead, ontobook
                )

            if returncode != 0:
                # the word "failed" is an identifier of failure, do not change
                failure = "unbundle replay batch item #%i failed\n" % counter
                ui.warn(failure)
                writereport(reportsfile, failure)
                break
            success = "unbundle replay batch item #%i successfully sent\n" % counter
            ui.warn(success)
            writereport(reportsfile, success)
            counter += 1

    return returncode
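A hedged sketch of a single stdin line in the form the loop above consumes: five whitespace-separated fields, with the onto-bookmark base64-encoded and the literal DELETED standing in for a removed rebased head. The file names here are placeholders, and older senders omit the fifth (logfile) field:

import base64

ontobook = base64.b64encode(b"master").decode("ascii")
line = " ".join(["bundle.hg", "timestamps.txt", ontobook, "DELETED", "replay.log"]) + "\n"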
Example #29
 def getflogheads(self, path):
     if not self.capable("getflogheads"):
         raise error.Abort("configured remotefile server does not "
                           "support getflogheads")
     f = wireproto.future()
     yield {"path": path}, f
     heads = pycompat.decodeutf8(f.value).split("\n") if f.value else []
     yield heads
Example #30
def knownnodes(self, nodes):
    f = peer.future()
    yield {"nodes": wireproto.encodelist(nodes)}, f
    d = f.value
    try:
        yield [bool(int(b)) for b in pycompat.decodeutf8(d)]
    except ValueError:
        raise error.Abort(error.ResponseError(_("unexpected response:"), d))