Code example #1
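This excerpt runs a repository-specific stablerev.script through the shell, so the optional --target value is passed through util.shellquote before being appended to the command string.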
def _execute(ui, repo, target=None):
    script = ui.config("stablerev", "script")
    if script is None:
        raise error.ConfigError(_("must set stablerev.script"))

    # Pass '--target $TARGET' for compatibility.
    # XXX: Remove this once the new code has been rolled out for some time.
    if target is not None:
        script += " --target %s" % util.shellquote(target)
    try:
        ui.debug("repo-specific script for stable: %s\n" % script)
        reporoot = repo.wvfs.join("")
        env = encoding.environ.copy()
        env.update({"REAL_CWD": pycompat.getcwd(), "HG_ROOT": reporoot})
        if target is not None:
            env["TARGET"] = target
        ui.debug("setting current working directory to: %s\n" % reporoot)
        p = subprocess.Popen(
            script,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            close_fds=util.closefds,
            cwd=reporoot,
            env=env,
        )
        res = p.communicate()
        ui.debug("stable script returns: %r\n" % (res, ))
        return res
    except subprocess.CalledProcessError as e:
        raise error.Abort(_("couldn't fetch stable rev: %s") % e)
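
All of these call sites interpolate values into strings that are executed with shell=True, and util.shellquote is the helper that makes a single value safe in that context. Below is a minimal sketch of the same pattern using the standard library's shlex.quote as a stand-in for util.shellquote (the real helper's behaviour may differ in detail, for example on Windows); run_script and its arguments are made up for illustration.

import shlex
import subprocess

def run_script(script, target=None):
    # Append an optional argument, quoting it so that spaces or shell
    # metacharacters in target cannot change the command structure.
    # shlex.quote stands in for util.shellquote here.
    if target is not None:
        script += " --target %s" % shlex.quote(target)
    proc = subprocess.Popen(
        script,
        shell=True,  # the command is a single shell string, as above
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    return proc.communicate()

# run_script("echo", "release branch") runs: echo --target 'release branch'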
Code example #2
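util.shellquote protects a Perforce depot path before it is interpolated into a p4 -G changes command line; the marshalled output is then scanned for change numbers.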
def _parse_view(self, path):
    "Read changes affecting the path"
    cmd = "p4 -G changes -s submitted %s" % util.shellquote(path)
    stdout = util.popen(cmd, mode="rb")
    p4changes = {}
    for d in loaditer(stdout):
        c = d.get("change", None)
        if c:
            p4changes[c] = True
    return p4changes
Code example #3
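extdiff's uisetup builds external diff command lines from configuration: when the tool path is discovered via findexe or findexternaltool rather than given literally, it is wrapped with util.shellquote before any options are appended.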
def uisetup(ui):
    for cmd, path in ui.configitems("extdiff"):
        path = util.expandpath(path)
        if cmd.startswith("cmd."):
            cmd = cmd[4:]
            if not path:
                path = util.findexe(cmd)
                if path is None:
                    path = filemerge.findexternaltool(ui, cmd) or cmd
            diffopts = ui.config("extdiff", "opts." + cmd)
            cmdline = util.shellquote(path)
            if diffopts:
                cmdline += " " + diffopts
        elif cmd.startswith("opts."):
            continue
        else:
            if path:
                # case "cmd = path opts"
                cmdline = path
                diffopts = len(pycompat.shlexsplit(cmdline)) > 1
            else:
                # case "cmd ="
                path = util.findexe(cmd)
                if path is None:
                    path = filemerge.findexternaltool(ui, cmd) or cmd
                cmdline = util.shellquote(path)
                diffopts = False
        # look for diff arguments in [diff-tools] then [merge-tools]
        if not diffopts:
            args = ui.config("diff-tools", cmd + ".diffargs") or ui.config(
                "merge-tools", cmd + ".diffargs"
            )
            if args:
                cmdline += " " + args
        command(
            cmd, extdiffopts[:], _("hg %s [OPTION]... [FILE]...") % cmd, inferrepo=True
        )(savedcmd(path, cmdline))
Code example #4
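A configured stablesscript template is formatted with a commit hash; the hash is shell-quoted first because the resulting string is executed as a shell script and its stdout parsed as JSON.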
def _lookupstables(repo, ctx):
    ui = repo.ui

    stablesscript = ui.config("stablerev", "stablesscript")
    if stablesscript is None:
        raise error.ConfigError(_("must set stablerev.stablesscript"))

    stablesscript = stablesscript.format(nodeid=util.shellquote(ctx.hex()))

    stdout = _executescript(stablesscript, repo)

    try:
        committostables = json.loads(stdout)
    except Exception as e:
        raise error.Abort(
            _("couldn't parse stablesscript stdout as json: %s") % e)

    return committostables.get(ctx.hex(), [])
Code example #5
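A converter helper that assembles an argument list from keyword options and then shell-quotes every element before joining them into a single command string (optionally redirecting stderr to the null device).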
def _cmdline(self, cmd, *args, **kwargs):
    cmdline = [self.command, cmd] + list(args)
    for k, v in pycompat.iteritems(kwargs):
        if len(k) == 1:
            cmdline.append("-" + k)
        else:
            cmdline.append("--" + k.replace("_", "-"))
        try:
            if len(k) == 1:
                cmdline.append("" + v)
            else:
                cmdline[-1] += "=" + v
        except TypeError:
            pass
    cmdline = [util.shellquote(arg) for arg in cmdline]
    if not self.ui.debugflag:
        cmdline += ["2>", os.devnull]
    cmdline = " ".join(cmdline)
    return cmdline
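
The same idea applied to a whole argument vector: build the list first, then quote every element and join. The sketch below is a rough, self-contained equivalent using shlex.quote as an assumed stand-in; it handles None-valued flags explicitly instead of relying on the TypeError trick above, and it omits the stderr redirect.

import shlex

def build_cmdline(command, subcmd, *args, **kwargs):
    # Long options become --key=value, single-letter options become -k value,
    # mirroring the structure of _cmdline above.
    argv = [command, subcmd] + list(args)
    for k, v in kwargs.items():
        if len(k) == 1:
            argv.append("-" + k)
            if v is not None:
                argv.append(str(v))
        else:
            opt = "--" + k.replace("_", "-")
            argv.append(opt if v is None else "%s=%s" % (opt, v))
    # Quote each element individually before joining into one shell string.
    return " ".join(shlex.quote(a) for a in argv)

# build_cmdline("git", "log", "HEAD", max_count="3", p=None)
# -> "git log HEAD --max-count=3 -p"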
Code example #6
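An ssh transport wrapper for the hg-git integration normalizes the command formats sent by different dulwich versions, shell-quotes the joined remote command, and spawns ssh with shell=True.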
def run_command(self, host, command, username=None, port=None):
    if isinstance(command, (str, bytes)):
        # 0.12.x dulwich sends the raw string
        command = [command]
    elif len(command) > 1:
        # 0.11.x dulwich sends an array of [command arg1 arg2 ...], so
        # we detect that here and reformat it back to what hg-git
        # expects (e.g. "command 'arg1 arg2'")
        command = ["%s '%s'" % (command[0], " ".join(command[1:]))]
    sshcmd = ui.config("ui", "ssh")
    args = util.sshargs(sshcmd, host, username, port)
    cmd = "%s %s %s" % (sshcmd, args, util.shellquote(
        " ".join(command)))
    ui.debug("calling ssh: %s\n" % cmd)
    proc = subprocess.Popen(
        util.quotecommand(cmd),
        shell=True,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
    )
    return SubprocessWrapper(proc)
Code example #7
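The Subversion converter can re-invoke hg debugsvnlog as a subprocess; the path to the Mercurial executable is shell-quoted before being embedded in the command handed to util.popen2.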
def _getlog(
    self,
    paths,
    start,
    end,
    limit=0,
    discover_changed_paths=True,
    strict_node_history=False,
):
    # Normalize path names, svn >= 1.5 only wants paths relative to
    # supplied URL
    relpaths = []
    for p in paths:
        if not p.startswith("/"):
            p = self.module + "/" + p
        relpaths.append(p.strip("/"))
    args = [
        self.baseurl,
        relpaths,
        start,
        end,
        limit,
        discover_changed_paths,
        strict_node_history,
    ]
    # developer config: convert.svn.debugsvnlog
    if not self.ui.configbool("convert", "svn.debugsvnlog"):
        return directlogstream(*args)
    arg = encodeargs(args)
    hgexe = util.hgexecutable()
    cmd = "%s debugsvnlog" % util.shellquote(hgexe)
    stdin, stdout = util.popen2(util.quotecommand(cmd))
    stdin.write(arg)
    try:
        stdin.close()
    except IOError:
        raise error.Abort(
            _("Mercurial failed to run itself, check" " hg executable is in PATH")
        )
    return logstream(stdout)
Code example #8
File: githelp.py  Project: jsoref/eden
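githelp translates git ls-files options into an equivalent hg status or hg files invocation, shell-quoting each include pattern it copies into the -I option.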
def lsfiles(ui, repo, *args, **kwargs):
    cmdoptions = [
        ("c", "cached", None, ""),
        ("d", "deleted", None, ""),
        ("m", "modified", None, ""),
        ("o", "others", None, ""),
        ("i", "ignored", None, ""),
        ("s", "stage", None, ""),
        ("z", "_zero", None, ""),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    if (
        opts.get("modified")
        or opts.get("deleted")
        or opts.get("others")
        or opts.get("ignored")
    ):
        cmd = Command("status")
        if opts.get("deleted"):
            cmd["-d"] = None
        if opts.get("modified"):
            cmd["-m"] = None
        if opts.get("others"):
            cmd["-o"] = None
        if opts.get("ignored"):
            cmd["-i"] = None
    else:
        cmd = Command("files")
    if opts.get("stage"):
        ui.status(
            _("note: Mercurial doesn't have a staging area, ignoring " "--stage\n")
        )
    if opts.get("_zero"):
        cmd["-0"] = None
    cmd.append(".")
    for include in args:
        cmd["-I"] = util.shellquote(include)

    ui.status((str(cmd)), "\n")
Code example #9
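A variant of example #1: the --target value is still shell-quoted when appended to the configured script, and the script output is parsed as JSON first, falling back to plain stdout.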
def _executeandparse(ui, repo, target=None):
    script = ui.config("stablerev", "script")
    if script is None:
        raise error.ConfigError(_("must set stablerev.script"))

    # Pass '--target $TARGET' for compatibility.
    # XXX: Remove this once the new code has been rolled out for some time.
    env = {}
    if target is not None:
        script += " --target %s" % util.shellquote(target)
        env["TARGET"] = target

    stdout = _executescript(script, repo, env)

    try:
        # Prefer JSON output first.
        data = json.loads(stdout)
        if "node" in data:
            return _validaterevspec(ui, data["node"])
    except Exception:
        pass

    # Fall back to stdout:
    return _validaterevspec(ui, stdout.strip())
Code example #10
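A small regex-replacement callback (apparently from extdiff's command-line templating, given the do3way and parent2 names) that shell-quotes each substituted value unless the parent2 placeholder is being dropped in a two-way diff.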
def quote(match):
    pre = match.group(2)
    key = match.group(3)
    if not do3way and key == "parent2":
        return pre
    return pre + util.shellquote(replace[key])
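
The callback above is invoked while expanding placeholders in a templated command line, so each substituted value is quoted at the moment it is inserted. Here is a self-contained sketch of that pattern with re.sub and shlex.quote; the regex and placeholder names are illustrative, not the ones the original code uses.

import re
import shlex

def expand(cmdline, replace):
    # Substitute $name placeholders, shell-quoting each replacement so the
    # expanded string can safely be handed to a shell.
    def quote(match):
        return shlex.quote(replace[match.group(1)])

    return re.sub(r"\$(\w+)", quote, cmdline)

# expand("diff -u $parent $child", {"parent": "a file", "child": "b"})
# -> "diff -u 'a file' b"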
Code example #11
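Background backup builds its command as an argument list; util.shellquote is only used to render that list into a readable string for the log file, while the process itself is spawned from the unquoted list.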
def backgroundbackup(repo, command=None, dest=None):
    """start background backup"""
    ui = repo.ui
    if command is not None:
        background_cmd = command
    elif workspace.currentworkspace(repo):
        background_cmd = ["hg", "cloud", "sync"]
    else:
        background_cmd = ["hg", "cloud", "backup"]
    infinitepush_bgssh = ui.config("infinitepush", "bgssh")
    if infinitepush_bgssh:
        background_cmd += ["--config", "ui.ssh=%s" % infinitepush_bgssh]

    # developer config: infinitepushbackup.bgdebuglocks
    if ui.configbool("infinitepushbackup", "bgdebuglocks"):
        background_cmd += ["--config", "devel.debug-lockers=true"]

    # developer config: infinitepushbackup.bgdebug
    if ui.configbool("infinitepushbackup", "bgdebug", False):
        background_cmd.append("--debug")

    if dest:
        background_cmd += ["--dest", dest]

    logfile = None
    logdir = ui.config("infinitepushbackup", "logdir")
    if logdir:
        # make newly created files and dirs non-writable
        oldumask = os.umask(0o022)
        try:
            try:
                # the user name from the machine
                username = util.getuser()
            except Exception:
                username = "******"

            if not _checkcommonlogdir(logdir):
                raise WrongPermissionsException(logdir)

            userlogdir = os.path.join(logdir, username)
            util.makedirs(userlogdir)

            if not _checkuserlogdir(userlogdir):
                raise WrongPermissionsException(userlogdir)

            reponame = os.path.basename(repo.sharedroot)
            _removeoldlogfiles(userlogdir, reponame)
            logfile = getlogfilename(logdir, username, reponame)
        except (OSError, IOError) as e:
            ui.debug("background backup log is disabled: %s\n" % e)
        except WrongPermissionsException as e:
            ui.debug(
                (
                    "%s directory has incorrect permission, "
                    + "background backup logging will be disabled\n"
                )
                % e.logdir
            )
        finally:
            os.umask(oldumask)

    if not logfile:
        logfile = os.devnull

    with open(logfile, "a") as f:
        timestamp = util.datestr(util.makedate(), "%Y-%m-%d %H:%M:%S %z")
        fullcmd = " ".join(util.shellquote(arg) for arg in background_cmd)
        f.write("\n%s starting: %s\n" % (timestamp, fullcmd))

    Stdio = bindings.process.Stdio
    out = Stdio.open(logfile, append=True, create=True)
    bindings.process.Command.new(background_cmd[0]).args(
        background_cmd[1:]
    ).avoidinherithandles().newsession().stdin(Stdio.null()).stdout(out).stderr(
        out
    ).spawn()
Code example #12
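debugnetwork's speed test shell-quotes the server-side speed-test command (after sshpeer._serverquote) before splicing it into the ssh command line, then hands its latency, download, and upload test callbacks to drivespeedtests.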
def checkhgspeed(ui, url, opts):
    speedcmd = ui.config("debugnetwork", "speed-test-command")
    if speedcmd is None:
        ui.status(
            _("Not testing connection speed: 'debugnetwork.speed-test-command' is not set"
              ),
            component="debugnetwork",
        )
        return True
    ui.status(_("Testing connection speed to the server\n"),
              component="debugnetwork")
    rui = hg.remoteui(ui, opts)
    sshcmd = rui.config("ui", "ssh")
    sshaddenv = dict(rui.configitems("sshenv"))
    sshenv = util.shellenviron(sshaddenv)
    args = util.sshargs(sshcmd, url.host, url.user, url.port)
    download = ui.configbytes("debugnetwork", "speed-test-download-size",
                              10000000)
    upload = ui.configbytes("debugnetwork", "speed-test-upload-size", 1000000)

    cmd = "%s %s %s" % (sshcmd, args,
                        util.shellquote(sshpeer._serverquote(speedcmd)))
    pipeo, pipei, pipee, sub = util.popen4(cmd, bufsize=0, env=sshenv)
    pipee = sshpeer.threadedstderr(rui, pipee)
    pipee.start()

    def latencytest(count):
        # Use the upload endpoint for the latency test.  We will time how long it
        # takes for the server to return the "upload complete" response for a
        # single byte upload.
        latencies = []
        with progress.spinner(ui, "testing connection latency"):
            for i in range(count):
                pipeo.write(b"upload 1\n")
                pipeo.flush()
                l = pipei.readline()
                if l != b"upload bytes 1\n":
                    raise error.Abort("invalid response from server: %r" % l)
                starttime = util.timer()
                pipeo.write(b"\n")
                pipeo.flush()
                l = pipei.readline()
                endtime = util.timer()
                if l != b"upload complete\n":
                    raise error.Abort("invalid response from server: %r" % l)
                latencies.append(endtime - starttime)
        return latencies

    def downloadtest(description, bytecount):
        pipeo.write(b"download %i\n" % bytecount)
        pipeo.flush()
        l = pipei.readline()
        if not l or not l.startswith(b"download bytes"):
            raise error.Abort("invalid response from server: %r" % l)
        bytecount = int(l.split()[2])
        with progress.bar(ui,
                          description,
                          total=bytecount,
                          formatfunc=util.bytecount) as prog:
            starttime = util.timer()
            remaining = bytecount
            while remaining > 0:
                data = pipei.read(min(remaining, BLOCK_SIZE))
                if not data:
                    raise error.Abort(
                        "premature end of speed-test download stream")
                remaining -= len(data)
                prog.value = bytecount - remaining
            l = pipei.readline()
            if not l or not l.startswith(b"download complete"):
                raise error.Abort("invalid response from server: %r" % l)
            endtime = util.timer()
        return endtime - starttime

    def uploadtest(description, bytecount):
        pipeo.write(b"upload %i\n" % bytecount)
        pipeo.flush()
        l = pipei.readline()
        if not l or not l.startswith(b"upload bytes"):
            raise error.Abort("invalid response from server: %r" % l)
        bytecount = int(l.split()[2])
        with progress.bar(ui,
                          description,
                          total=bytecount,
                          formatfunc=util.bytecount) as prog:
            starttime = util.timer()
            remaining = bytecount
            while remaining > 0:
                data = os.urandom(min(remaining, BLOCK_SIZE))
                remaining -= len(data)
                pipeo.write(data)
                prog.value = bytecount - remaining
            pipeo.flush()
            l = pipei.readline()
            if not l or not l.startswith(b"upload complete"):
                raise error.Abort("invalid response from server: %r" % l)
            endtime = util.timer()
        return endtime - starttime

    return drivespeedtests(
        ui,
        (latencytest, 5),
        (downloadtest, "download", download),
        (uploadtest, "upload", upload),
    )
Code example #13
File: debugnetwork.py  Project: xmonader/eden
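Another version of the same checkhgspeed function as example #12: the quoting of the remote command is identical, but this copy writes str rather than bytes to the pipes and reports its results inline instead of delegating to drivespeedtests.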
def checkhgspeed(ui, url, opts):
    speedcmd = ui.config("debugnetwork", "speed-test-command")
    if speedcmd is None:
        ui.status(
            _("Not testing connection speed: 'debugnetwork.speed-test-command' is not set"
              ),
            component="debugnetwork",
        )
        return True
    ui.status(_("Testing connection speed to the server\n"),
              component="debugnetwork")
    rui = hg.remoteui(ui, opts)
    sshcmd = rui.config("ui", "ssh")
    sshaddenv = dict(rui.configitems("sshenv"))
    sshenv = util.shellenviron(sshaddenv)
    args = util.sshargs(sshcmd, url.host, url.user, url.port)
    download = ui.configbytes("debugnetwork", "speed-test-download-size",
                              10000000)
    upload = ui.configbytes("debugnetwork", "speed-test-upload-size", 1000000)

    cmd = "%s %s %s" % (sshcmd, args,
                        util.shellquote(sshpeer._serverquote(speedcmd)))
    pipeo, pipei, pipee, sub = util.popen4(cmd, bufsize=0, env=sshenv)
    pipee = sshpeer.threadedstderr(rui, pipee)
    pipee.start()

    def latencytest(count):
        # Use the upload endpoint for the latency test.  We will time how long it
        # takes for the server to return the "upload complete" response for a
        # single byte upload.
        latencies = []
        with progress.spinner(ui, "testing connection latency"):
            for i in range(count):
                pipeo.write("upload 1\n")
                pipeo.flush()
                l = pipei.readline()
                if l != "upload bytes 1\n":
                    raise error.Abort("invalid response from server: %r" % l)
                starttime = util.timer()
                pipeo.write("\n")
                pipeo.flush()
                l = pipei.readline()
                endtime = util.timer()
                if l != "upload complete\n":
                    raise error.Abort("invalid response from server: %r" % l)
                latencies.append(endtime - starttime)
        return latencies

    def downloadtest(description, bytecount):
        pipeo.write("download %s\n" % bytecount)
        pipeo.flush()
        l = pipei.readline()
        if not l or not l.startswith("download bytes"):
            raise error.Abort("invalid response from server: %r" % l)
        bytecount = int(l.split()[2])
        with progress.bar(ui,
                          description,
                          total=bytecount,
                          formatfunc=util.bytecount) as prog:
            starttime = util.timer()
            remaining = bytecount
            while remaining > 0:
                data = pipei.read(min(remaining, BLOCK_SIZE))
                if not data:
                    raise error.Abort(
                        "premature end of speed-test download stream")
                remaining -= len(data)
                prog.value = bytecount - remaining
            l = pipei.readline()
            if not l or not l.startswith("download complete"):
                raise error.Abort("invalid response from server: %r" % l)
            endtime = util.timer()
        return endtime - starttime

    def uploadtest(description, bytecount):
        pipeo.write("upload %s\n" % bytecount)
        pipeo.flush()
        l = pipei.readline()
        if not l or not l.startswith("upload bytes"):
            raise error.Abort("invalid response from server: %r" % l)
        bytecount = int(l.split()[2])
        with progress.bar(ui,
                          description,
                          total=bytecount,
                          formatfunc=util.bytecount) as prog:
            starttime = util.timer()
            remaining = bytecount
            while remaining > 0:
                data = os.urandom(min(remaining, BLOCK_SIZE))
                remaining -= len(data)
                pipeo.write(data)
                prog.value = bytecount - remaining
            pipeo.flush()
            l = pipei.readline()
            if not l or not l.startswith("upload complete"):
                raise error.Abort("invalid response from server: %r" % l)
            endtime = util.timer()
        return endtime - starttime

    def printresult(testname, bytecount, testtime):
        byterate = bytecount / testtime
        ui.status(
            _("Speed: %s %s in %s (%0.2f Mbit/s, %0.2f MiB/s)\n") % (
                testname,
                util.bytecount(bytecount),
                util.timecount(testtime),
                8 * byterate / 1000000,
                byterate / (1024 * 1024),
            ),
            component="debugnetwork",
        )

    try:
        latencies = latencytest(5)
        latency = sum(latencies, 0) / len(latencies)
        ui.status(
            _("Latency: %s (average of %s round-trips)\n") %
            (util.timecount(latency), len(latencies)),
            component="debugnetwork",
        )

        for testfunc, testname, bytecount in [
            (downloadtest, "download", download),
            (uploadtest, "upload", upload),
        ]:
            warmuptime = testfunc("warming up for %s test" % testname,
                                  bytecount)
            if warmuptime < 0.2:
                # The network is sufficiently fast that we warmed up in <200ms.
                # To make the test more meaningful, increase the size of data
                # 25x (which should give a maximum test time of 5s).
                bytecount *= 25
                warmuptime = testfunc(
                    "warming up for large %s test" % testname, bytecount)
            printresult("(round 1) %sed" % testname, bytecount, warmuptime)
            testtime = testfunc(testname, bytecount)
            printresult("(round 2) %sed" % testname, bytecount, testtime)
        return True
    except Exception:
        return False
Code example #14
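The Perforce converter fetches file contents with p4 -G print, shell-quoting the combined depotpath#rev specifier, and retries once if the first attempt returns an error record.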
    def getfile(self, name, rev):
        cmd = "p4 -G print %s" % util.shellquote("%s#%s" %
                                                 (self.depotname[name], rev))

        lasterror = None
        while True:
            stdout = util.popen(cmd, mode="rb")

            mode = None
            contents = []
            keywords = None

            for d in loaditer(stdout):
                code = d["code"]
                data = d.get("data")

                if code == "error":
                    # if this is the first time error happened
                    # re-attempt getting the file
                    if not lasterror:
                        lasterror = IOError(d["generic"], data)
                        # this will exit inner-most for-loop
                        break
                    else:
                        raise lasterror

                elif code == "stat":
                    action = d.get("action")
                    if action in ["purge", "delete", "move/delete"]:
                        return None, None
                    p4type = self.re_type.match(d["type"])
                    if p4type:
                        mode = ""
                        flags = (p4type.group(1) or "") + (p4type.group(3)
                                                           or "")
                        if "x" in flags:
                            mode = "x"
                        if p4type.group(2) == "symlink":
                            mode = "l"
                        if "ko" in flags:
                            keywords = self.re_keywords_old
                        elif "k" in flags:
                            keywords = self.re_keywords

                elif code == "text" or code == "binary":
                    contents.append(data)

                lasterror = None

            if not lasterror:
                break

        if mode is None:
            return None, None

        contents = "".join(contents)

        if keywords:
            contents = keywords.sub("$\\1$", contents)
        if mode == "l" and contents.endswith("\n"):
            contents = contents[:-1]

        return contents, mode
Code example #15
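The converter's _parse method shell-quotes the client name passed to p4 -G client -o and, further down, each depot path given to p4 -G filelog, while building the list of changelists and file revisions to import.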
    def _parse(self, ui, path):
        "Prepare list of P4 filenames and revisions to import"
        p4changes = {}
        changeset = {}
        files_map = {}
        copies_map = {}
        localname = {}
        depotname = {}
        heads = []

        ui.status(_("reading p4 views\n"))

        # read client spec or view
        if "/" in path:
            p4changes.update(self._parse_view(path))
            if path.startswith("//") and path.endswith("/..."):
                views = {path[:-3]: ""}
            else:
                views = {"//": ""}
        else:
            cmd = "p4 -G client -o %s" % util.shellquote(path)
            clientspec = marshal.load(util.popen(cmd, mode="rb"))

            views = {}
            for client in clientspec:
                if client.startswith("View"):
                    sview, cview = clientspec[client].split()
                    p4changes.update(self._parse_view(sview))
                    if sview.endswith("...") and cview.endswith("..."):
                        sview = sview[:-3]
                        cview = cview[:-3]
                    cview = cview[2:]
                    cview = cview[cview.find("/") + 1:]
                    views[sview] = cview

        # list of changes that affect our source files
        p4changes = p4changes.keys()
        p4changes.sort(key=int)

        # list with depot pathnames, longest first
        vieworder = views.keys()
        vieworder.sort(key=len, reverse=True)

        # handle revision limiting
        startrev = self.ui.config("convert", "p4.startrev")

        # now read the full changelists to get the list of file revisions
        ui.status(_("collecting p4 changelists\n"))
        lastid = None
        for change in p4changes:
            if startrev and int(change) < int(startrev):
                continue
            if self.revs and int(change) > int(self.revs[0]):
                continue
            if change in self.revmap:
                # Ignore already present revisions, but set the parent pointer.
                lastid = change
                continue

            if lastid:
                parents = [lastid]
            else:
                parents = []

            d = self._fetch_revision(change)
            c = self._construct_commit(d, parents)

            descarr = c.desc.splitlines(True)
            if len(descarr) > 0:
                shortdesc = descarr[0].rstrip("\r\n")
            else:
                shortdesc = "**empty changelist description**"

            t = "%s %s" % (c.rev, repr(shortdesc)[1:-1])
            ui.status(util.ellipsis(t, 80) + "\n")

            files = []
            copies = {}
            copiedfiles = []
            i = 0
            while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
                oldname = d["depotFile%d" % i]
                filename = None
                for v in vieworder:
                    if oldname.lower().startswith(v.lower()):
                        filename = decodefilename(views[v] + oldname[len(v):])
                        break
                if filename:
                    files.append((filename, d["rev%d" % i]))
                    depotname[filename] = oldname
                    if d.get("action%d" % i) == "move/add":
                        copiedfiles.append(filename)
                    localname[oldname] = filename
                i += 1

            # Collect information about copied files
            for filename in copiedfiles:
                oldname = depotname[filename]

                flcmd = "p4 -G filelog %s" % util.shellquote(oldname)
                flstdout = util.popen(flcmd, mode="rb")

                copiedfilename = None
                for d in loaditer(flstdout):
                    copiedoldname = None

                    i = 0
                    while ("change%d" % i) in d:
                        if (d["change%d" % i] == change
                                and d["action%d" % i] == "move/add"):
                            j = 0
                            while ("file%d,%d" % (i, j)) in d:
                                if d["how%d,%d" % (i, j)] == "moved from":
                                    copiedoldname = d["file%d,%d" % (i, j)]
                                    break
                                j += 1
                        i += 1

                    if copiedoldname and copiedoldname in localname:
                        copiedfilename = localname[copiedoldname]
                        break

                if copiedfilename:
                    copies[filename] = copiedfilename
                else:
                    ui.warn(
                        _("cannot find source for copied file: %s@%s\n") %
                        (filename, change))

            changeset[change] = c
            files_map[change] = files
            copies_map[change] = copies
            lastid = change

        if lastid and len(changeset) > 0:
            heads = [lastid]

        return {
            "changeset": changeset,
            "files": files_map,
            "copies": copies_map,
            "heads": heads,
            "depotname": depotname,
        }