Example #1
def overlaycontext(memworkingcopy,
                   ctx,
                   parents=None,
                   extra=None,
                   loginfo=None,
                   mutinfo=None):
    """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
    memworkingcopy overrides file contents.
    """
    # parents must contain 2 items: (node1, node2)
    if parents is None:
        parents = ctx.repo().changelog.parents(ctx.node())
    if extra is None:
        extra = ctx.extra()
    date = ctx.date()
    desc = ctx.description()
    user = ctx.user()
    files = set(ctx.files()).union(pycompat.iterkeys(memworkingcopy))
    store = overlaystore(ctx, memworkingcopy)
    return context.memctx(
        repo=ctx.repo(),
        parents=parents,
        text=desc,
        files=files,
        filectxfn=store,
        user=user,
        date=date,
        branch=None,
        extra=extra,
        loginfo=loginfo,
        mutinfo=mutinfo,
    )
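
A minimal usage sketch for overlaycontext (hypothetical file name and content; assumes an already-open repo and an existing changectx ctx):

# Hypothetical usage, not part of the example above: override one file
# in memory and commit the result without touching the working copy.
memworkingcopy = {"README": b"rewritten contents\n"}
memc = overlaycontext(memworkingcopy, ctx)
newnode = memc.commit()  # returns the node of the rewritten commit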
Example #2
def debughiddencommit(ui, repo, *pats, **opts):
    """
    commit to commit cloud

    This command adds a commit to the commit cloud by committing
    locally, sending to commit cloud, then hiding it.

    Files in the working copy will not be changed.

    Commit hash is printed as a result of this command.
    """
    with backuplock.lock(repo), repo.wlock():
        status = repo.status()
        files = status.modified + status.added + status.removed + status.deleted
        removed = set(status.removed + status.deleted)
        user = ui.username()
        extra = {}
        date = None
        wctx = repo[None]

        matcher = scmutil.match(wctx, pats, opts, emptyalways=False)
        ignored = bool(opts.get("ignored_files"))
        includefiles = [
            x for ff in repo.dirstate.status(matcher, ignored, False, True) for x in ff
        ]
        files = list(set(files).union(set(includefiles)))

        def getfilectx(repo, memctx, path):
            if path in removed:
                return None

            return wctx[path]

        node = memctx(
            repo,
            [wctx.p1().node(), nullid],
            "Ephemeral commit",
            sorted(files),
            getfilectx,
            user,
            date,
            extra,
        ).commit()

        try:
            uploaded, failed = backup.backupwithlockheld(repo, [int(repo[node])])
        finally:
            # Be sure to hide the commit, even if the backup fails
            visibility.remove(repo, [node])

    if failed:
        return 2

    ui.write(_("%s\n") % hex(node))
Example #3
def copyctx(newrepo, ctx):
    cl = newrepo.changelog
    p1 = cl.node(len(cl) - 1)
    files = ctx.files()
    desc = "copied: %s" % ctx.description()

    def getfctx(repo, memctx, path):
        if path not in ctx:
            return None
        return context.overlayfilectx(ctx[path])

    return context.memctx(newrepo, [p1, None], desc, files, getfctx)
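
A hedged sketch of how copyctx might be driven (assumes oldrepo and newrepo are already-open repository objects; purely illustrative):

# Hypothetical driver loop: replay every revision of oldrepo, in
# revision order, into newrepo.
for rev in oldrepo:
    copyctx(newrepo, oldrepo[rev]).commit()

Because copyctx always uses newrepo's current tip as the single parent, this only makes sense for a linear history.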
Example #4
    def _pushsingleunchecked(ctx, commit, getcommitdatefn=None):
        """Return newly pushed node"""
        repo = ctx.repo()

        def getfilectx(repo, memctx, path):
            assert path in commit.filechanges
            entry = commit.filechanges[path]
            if entry is None:
                # deleted
                return None
            else:
                # changed or created
                mode, content, copysource = entry
                return context.memfilectx(
                    repo,
                    memctx,
                    path,
                    content,
                    islink=("l" in mode),
                    isexec=("x" in mode),
                    copied=copysource,
                )

        extra = commit.extra.copy()
        date = commit.date
        loginfo = {}
        mutinfo = None

        orignode = commit.orignode
        if orignode:
            mutinfo = mutation.record(repo, extra, [orignode], "pushrebase")
            loginfo = {"predecessors": hex(orignode), "mutation": "pushrebase"}
            date = getcommitdatefn(repo.ui, hex(orignode), commit.date)

        return context.memctx(
            repo,
            [ctx.node(), nullid],
            commit.desc,
            sorted(commit.filechanges),
            getfilectx,
            commit.user,
            date,
            extra,
            loginfo=loginfo,
            mutinfo=mutinfo,
        ).commit()
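
For reference, the commit.filechanges mapping consumed by getfilectx above pairs each path with either None (a deletion) or a (mode, content, copysource) tuple; a hypothetical instance:

# Illustrative data only, not taken from the example above.
filechanges = {
    "src/app.py": ("", b"print('hi')\n", None),   # regular file, changed
    "tools/run.sh": ("x", b"#!/bin/sh\n", None),  # executable
    "docs/link": ("l", b"target/path", None),     # symlink
    "old.txt": None,                              # deleted in this commit
}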
Example #5
def debugephemeralcommit(ui, repo, **opts):
    """
    commit to commit cloud

    This command adds a commit to the commit cloud by committing
    locally, sending to commit cloud, then hiding it.

    Files in the working copy will not be changed.

    Commit hash is printed as a result of this command.
    """
    with repo.wlock():
        status = repo.status()
        files = status.modified + status.added + status.removed + status.deleted
        removed = set(status.removed + status.deleted)
        user = ui.username()
        extra = {}
        date = None

        wctx = repo[None]

        def getfilectx(repo, memctx, path):
            if path in removed:
                return None

            return wctx[path]

        node = memctx(
            repo,
            [wctx.p1().node(), nullid],
            "Ephemeral commit",
            sorted(files),
            getfilectx,
            user,
            date,
            extra,
        ).commit()

        visibility.remove(repo, [node])

    backup.backup(repo, [int(repo[node])])

    ui.write(_("%s\n") % hex(node))
Example #6
def _snapshot2ctx(repo, snapshot):
    """Build a memctx for this snapshot.

    This is not precisely correct as it doesn't differentiate untracked/added
    but it's good enough for diffing.
    """

    parent = snapshot["hg_parents"]
    # Once merges/conflicted states are supported, we'll need to support more
    # than one parent
    assert isinstance(parent, bytes)
    # Fetch parent if not present locally
    if parent not in repo:
        repo.pull(headnodes=(parent, ))

    parents = (parent, nullid)
    path2filechange = {f[0]: f[1] for f in snapshot["file_changes"]}

    def token2cacheable(token):
        data = token["data"]
        return pickle.dumps((data["id"], data["bubble_id"]))

    cache = {}

    def getfile(repo, memctx, path):
        change = path2filechange.get(path)
        if change is None:
            return repo[parent][path]
        if change == "Deletion" or change == "UntrackedDeletion":
            return None
        elif "Change" in change or "UntrackedChange" in change:
            change = change.get("Change") or change["UntrackedChange"]
            token = change["upload_token"]
            key = token2cacheable(token)
            if key not in cache:
                # Possible future optimisation: Download files in parallel
                cache[key] = repo.edenapi.downloadfiletomemory(token)
            islink = change["file_type"] == "Symlink"
            isexec = change["file_type"] == "Executable"
            return memfilectx(repo,
                              None,
                              path,
                              data=cache[key],
                              islink=islink,
                              isexec=isexec)
        else:
            raise error.Abort(_("Unknown file change {}").format(change))

    time, tz = snapshot["time"], snapshot["tz"]
    if time or tz:
        date = (time, tz)
    else:
        date = None

    ctx = memctx(
        repo,
        parents,
        text="",
        files=list(path2filechange.keys()),
        filectxfn=getfile,
        user=snapshot["author"] or None,
        date=date,
    )
    return ctx
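
The snapshot payload this function expects can be inferred from the lookups above; a hypothetical, abbreviated instance (all values invented for illustration):

# Hypothetical payload. The upload token is opaque here except for
# data["id"] and data["bubble_id"], which token2cacheable uses as a
# download-cache key.
snapshot = {
    "hg_parents": b"\x00" * 20,  # one binary parent node
    "author": "user@example.com",
    "time": 0,
    "tz": 0,
    "file_changes": [
        ("gone.txt", "Deletion"),
        ("edited.txt", {"Change": {
            "upload_token": {"data": {"id": 1, "bubble_id": 2}},
            "file_type": "Regular",
        }}),
    ],
}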
Example #7
    def putcommit(self, files, copies, parents, commit, source, revmap, full,
                  cleanp2):
        files = dict(files)

        def getfilectx(repo, memctx, f):
            if p2ctx and f in p2files and f not in copies:
                self.ui.debug("reusing %s from p2\n" % f)
                try:
                    return p2ctx[f]
                except error.ManifestLookupError:
                    # If the file doesn't exist in p2, then we're syncing a
                    # delete, so just return None.
                    return None
            try:
                v = files[f]
            except KeyError:
                return None
            data, mode = source.getfile(f, v)
            if data is None:
                return None
            return context.memfilectx(self.repo, memctx, f, data, "l" in mode,
                                      "x" in mode, copies.get(f))

        pl = []
        for p in parents:
            if p not in pl:
                pl.append(p)
        parents = pl
        nparents = len(parents)
        if self.filemapmode and nparents == 1:
            m1node = self.repo.changelog.read(nodemod.bin(parents[0]))[0]
            parent = parents[0]

        # Pad the parent list to two entries; the check runs twice so an
        # empty list receives both nullid placeholders.
        if len(parents) < 2:
            parents.append(nodemod.nullid)
        if len(parents) < 2:
            parents.append(nodemod.nullid)
        p2 = parents.pop(0)

        text = commit.desc

        sha1s = re.findall(sha1re, text)
        for sha1 in sha1s:
            try:
                oldrev = source.lookuprev(sha1)
                newrev = revmap.get(oldrev)
                if newrev is not None:
                    text = text.replace(sha1, newrev[:len(sha1)])
            except Exception:
                # Don't crash if we find a bad sha in the message
                continue

        extra = commit.extra.copy()

        sourcename = self.repo.ui.config("convert", "hg.sourcename")
        if sourcename:
            extra["convert_source"] = sourcename

        for label in (
                "source",
                "transplant_source",
                "rebase_source",
                "intermediate-source",
        ):
            node = extra.get(label)

            if node is None:
                continue

            # Only transplant stores its reference in binary
            if label == "transplant_source":
                node = nodemod.hex(node)

            newrev = revmap.get(node)
            if newrev is not None:
                if label == "transplant_source":
                    newrev = nodemod.bin(newrev)

                extra[label] = newrev

        if self.branchnames and commit.branch:
            extra["branch"] = commit.branch
        if commit.rev and commit.saverev:
            extra["convert_revision"] = commit.rev

        while parents:
            p1 = p2
            p2 = parents.pop(0)
            p1ctx = self.repo[p1]
            p2ctx = None
            if p2 != nodemod.nullid:
                p2ctx = self.repo[p2]
            fileset = set(files)
            if full:
                fileset.update(self.repo[p1])
                fileset.update(self.repo[p2])

            if p2ctx:
                p2files = set(cleanp2)
                for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
                    p2files.add(file)
                    fileset.add(file)

            ctx = context.memctx(
                self.repo,
                (p1, p2),
                text,
                fileset,
                getfilectx,
                commit.author,
                commit.date,
                extra,
            )

            # We won't know if the conversion changes the node until after the
            # commit, so copy the source's phase for now.
            self.repo.ui.setconfig("phases", "new-commit",
                                   phases.phasenames[commit.phase], "convert")

            with self.repo.transaction("convert") as tr:
                node = nodemod.hex(self.repo.commitctx(ctx))

                # If the node value has changed, but the phase is lower than
                # draft, set it back to draft since it hasn't been exposed
                # anywhere.
                if commit.rev != node:
                    ctx = self.repo[node]
                    if ctx.phase() < phases.draft:
                        phases.registernew(self.repo, tr, phases.draft,
                                           [ctx.node()])

            text = "(octopus merge fixup)\n"
            p2 = node

        if self.filemapmode and nparents == 1:
            mfl = self.repo.manifestlog
            mnode = self.repo.changelog.read(nodemod.bin(p2))[0]
            closed = "close" in commit.extra
            if not closed and not mfl[m1node].read().diff(mfl[mnode].read()):
                self.ui.status(_("filtering out empty revision\n"))
                self.repo.rollback(force=True)
                return parent
        return p2
Example #8
def rewrite(repo, old, updates, head, newbases, commitopts, mutop=None):
    """Return (nodeid, created) where nodeid is the identifier of the
    changeset generated by the rewrite process, and created is True if
    nodeid was actually created. If created is False, nodeid
    references a changeset existing before the rewrite call.
    """
    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("rewrite")
        if len(old.parents()) > 1:  # XXX remove this unnecessary limitation.
            raise error.Abort(_("cannot amend merge changesets"))
        base = old.p1()
        updatebookmarks = bookmarksupdater(repo, [old.node()] +
                                           [u.node() for u in updates])

        # commit a new version of the old changeset, including the update
        # collect all files which might be affected
        files = set(old.files())
        for u in updates:
            files.update(u.files())

        # Recompute copies (avoid recording a -> b -> a)
        copied = copies.pathcopies(base, head)

        # prune files which were reverted by the updates
        def samefile(f):
            if f in head.manifest():
                a = head.filectx(f)
                if f in base.manifest():
                    b = base.filectx(f)
                    return a.data() == b.data() and a.flags() == b.flags()
                else:
                    return False
            else:
                return f not in base.manifest()

        files = [f for f in files if not samefile(f)]
        # commit version of these files as defined by head
        headmf = head.manifest()

        def filectxfn(repo, ctx, path):
            if path in headmf:
                fctx = head[path]
                flags = fctx.flags()
                mctx = context.memfilectx(
                    repo,
                    ctx,
                    fctx.path(),
                    fctx.data(),
                    islink="l" in flags,
                    isexec="x" in flags,
                    copied=copied.get(path),
                )
                return mctx
            return None

        message = cmdutil.logmessage(repo, commitopts)
        if not message:
            message = old.description()

        user = commitopts.get("user") or old.user()
        # TODO: If no date is given, we should take the old commit date
        # if we are working on one changeset, or mimic the fold behavior
        # for dates.
        date = commitopts.get("date") or None
        extra = dict(commitopts.get("extra", old.extra()))
        extra["branch"] = head.branch()
        mutation.record(repo, extra, [c.node() for c in updates], mutop)
        loginfo = {
            "predecessors": " ".join(c.hex() for c in updates),
            "mutation": mutop,
        }

        new = context.memctx(
            repo,
            parents=newbases,
            text=message,
            files=files,
            filectxfn=filectxfn,
            user=user,
            date=date,
            extra=extra,
            loginfo=loginfo,
        )

        if commitopts.get("edit"):
            new._text = cmdutil.commitforceeditor(repo, new, [])
        revcount = len(repo)
        newid = repo.commitctx(new)
        new = repo[newid]
        created = len(repo) != revcount
        updatebookmarks(newid)

        tr.close()
        return newid, created
    finally:
        lockmod.release(tr, lock, wlock)
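
A hedged sketch of an amend-style call to rewrite (assumes an open repo; the option keys mirror what commitopts is queried for above, and mutop="amend" is an illustrative operation name):

# Hypothetical invocation: rewrite the working-copy parent in place
# with a new message, keeping its original parent as the new base.
old = repo["."]
newid, created = rewrite(
    repo,
    old,
    updates=[],                  # no other changesets folded in
    head=old,                    # take file contents from old itself
    newbases=[old.p1().node(), None],
    commitopts={"message": "amended message"},
    mutop="amend",
)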
Example #9
def synthesize(ui, repo, descpath, **opts):
    """synthesize commits based on a model of an existing repository

    The model must have been generated by :hg:`analyze`. Commits will
    be generated randomly according to the probabilities described in
    the model. If --initfiles is set, the repository will be seeded with
    the given number of files following the modeled repository's directory
    structure.

    When synthesizing new content, commit descriptions, and user
    names, words will be chosen randomly from a dictionary that is
    presumed to contain one word per line. Use --dict to specify the
    path to an alternate dictionary to use.
    """
    try:
        fp = hg.openpath(ui, descpath)
    except Exception as err:
        raise error.Abort("%s: %s" % (descpath, err))
    desc = json.load(fp)
    fp.close()

    def cdf(l):
        if not l:
            return [], []
        vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
        t = float(sum(probs, 0))
        s, cdfs = 0, []
        for v in probs:
            s += v
            cdfs.append(s / t)
        return vals, cdfs

    lineschanged = cdf(desc["lineschanged"])
    fileschanged = cdf(desc["fileschanged"])
    filesadded = cdf(desc["filesadded"])
    dirsadded = cdf(desc["dirsadded"])
    filesremoved = cdf(desc["filesremoved"])
    linelengths = cdf(desc["linelengths"])
    parents = cdf(desc["parents"])
    p1distance = cdf(desc["p1distance"])
    p2distance = cdf(desc["p2distance"])
    interarrival = cdf(desc["interarrival"])
    linesinfilesadded = cdf(desc["linesinfilesadded"])
    tzoffset = cdf(desc["tzoffset"])

    dictfile = opts.get("dict") or "/usr/share/dict/words"
    try:
        fp = open(dictfile, "rU")
    except IOError as err:
        raise error.Abort("%s: %s" % (dictfile, err.strerror))
    words = fp.read().splitlines()
    fp.close()

    initdirs = {}
    if desc["initdirs"]:
        for k, v in desc["initdirs"]:
            initdirs[k.encode("utf-8").replace(".hg", "_hg")] = v
        initdirs = renamedirs(initdirs, words)
    initdirscdf = cdf(initdirs)

    def pick(cdf):
        return cdf[0][bisect.bisect_left(cdf[1], random.random())]

    def pickpath():
        return os.path.join(pick(initdirscdf), random.choice(words))

    def makeline(minimum=0):
        total = max(minimum, pick(linelengths))
        c, l = 0, []
        while c < total:
            w = random.choice(words)
            c += len(w) + 1
            l.append(w)
        return " ".join(l)

    wlock = repo.wlock()
    lock = repo.lock()

    nevertouch = {".hgsub", ".hgignore", ".hgtags"}

    progress = ui.progress
    _synthesizing = _("synthesizing")
    _files = _("initial files")
    _changesets = _("changesets")

    # Synthesize a single initial revision adding files to the repo according
    # to the modeled directory structure.
    initcount = int(opts["initfiles"])
    if initcount and initdirs:
        pctx = repo[None].parents()[0]
        dirs = set(pctx.dirs())
        files = {}

        def validpath(path):
            # Don't pick filenames which are already directory names.
            if path in dirs:
                return False
            # Don't pick directories which were used as file names.
            while path:
                if path in files:
                    return False
                path = os.path.dirname(path)
            return True

        for i in xrange(0, initcount):
            ui.progress(_synthesizing, i, unit=_files, total=initcount)

            path = pickpath()
            while not validpath(path):
                path = pickpath()
            data = "%s contents\n" % path
            files[path] = data
            dir = os.path.dirname(path)
            while dir and dir not in dirs:
                dirs.add(dir)
                dir = os.path.dirname(dir)

        def filectxfn(repo, memctx, path):
            return context.memfilectx(repo, memctx, path, files[path])

        ui.progress(_synthesizing, None)
        message = "synthesized wide repo with %d files" % (len(files), )
        mc = context.memctx(
            repo,
            [pctx.node(), nullid],
            message,
            pycompat.iterkeys(files),
            filectxfn,
            ui.username(),
            "%d %d" % util.makedate(),
        )
        initnode = mc.commit()
        if ui.debugflag:
            hexfn = hex
        else:
            hexfn = short
        ui.status(
            _("added commit %s with %d files\n") %
            (hexfn(initnode), len(files)))

    # Synthesize incremental revisions to the repository, adding repo depth.
    count = int(opts["count"])
    heads = set(map(repo.changelog.rev, repo.heads()))
    for i in xrange(count):
        progress(_synthesizing, i, unit=_changesets, total=count)

        node = repo.changelog.node
        revs = len(repo)

        def pickhead(heads, distance):
            if heads:
                lheads = sorted(heads)
                rev = revs - min(pick(distance), revs)
                if rev < lheads[-1]:
                    rev = lheads[bisect.bisect_left(lheads, rev)]
                else:
                    rev = lheads[-1]
                return rev, node(rev)
            return nullrev, nullid

        r1 = revs - min(pick(p1distance), revs)
        p1 = node(r1)

        # the number of heads will grow without bound if we use a pure
        # model, so artificially constrain their proliferation
        toomanyheads = len(heads) > random.randint(1, 20)
        if p2distance[0] and (pick(parents) == 2 or toomanyheads):
            r2, p2 = pickhead(heads.difference([r1]), p2distance)
        else:
            r2, p2 = nullrev, nullid

        pl = [p1, p2]
        pctx = repo[r1]
        mf = pctx.manifest()
        mfk = mf.keys()
        changes = {}
        if mfk:
            for __ in xrange(pick(fileschanged)):
                for __ in xrange(10):
                    fctx = pctx.filectx(random.choice(mfk))
                    path = fctx.path()
                    if not (path in nevertouch or fctx.isbinary()
                            or "l" in fctx.flags()):
                        break
                lines = fctx.data().splitlines()
                add, remove = pick(lineschanged)
                for __ in xrange(remove):
                    if not lines:
                        break
                    del lines[random.randrange(0, len(lines))]
                for __ in xrange(add):
                    lines.insert(random.randint(0, len(lines)), makeline())
                path = fctx.path()
                changes[path] = "\n".join(lines) + "\n"
            for __ in xrange(pick(filesremoved)):
                path = random.choice(mfk)
                for __ in xrange(10):
                    path = random.choice(mfk)
                    if path not in changes:
                        break
                # Record the removal: filectxfn below treats a None entry
                # as a deleted file.
                changes[path] = None
        if filesadded:
            dirs = list(pctx.dirs())
            dirs.insert(0, "")
        for __ in xrange(pick(filesadded)):
            pathstr = ""
            while pathstr in dirs:
                path = [random.choice(dirs)]
                if pick(dirsadded):
                    path.append(random.choice(words))
                path.append(random.choice(words))
                pathstr = "/".join(filter(None, path))
            data = ("\n".join(makeline()
                              for __ in xrange(pick(linesinfilesadded))) +
                    "\n")
            changes[pathstr] = data

        def filectxfn(repo, memctx, path):
            data = changes.get(path)
            if data is None:
                # Path was marked removed (or is absent); returning None
                # tells memctx to delete it.
                return None
            return context.memfilectx(repo, memctx, path, data)

        if not changes:
            continue
        if revs:
            date = repo["tip"].date()[0] + pick(interarrival)
        else:
            date = time.time() - (86400 * count)
        # Dates in Mercurial must be positive and fit in 32-bit signed integers.
        date = min(0x7FFFFFFF, max(0, date))
        user = random.choice(words) + "@" + random.choice(words)
        mc = context.memctx(
            repo,
            pl,
            makeline(minimum=2),
            sorted(changes),
            filectxfn,
            user,
            "%d %d" % (date, pick(tzoffset)),
        )
        newnode = mc.commit()
        heads.add(repo.changelog.rev(newnode))
        heads.discard(r1)
        heads.discard(r2)

    lock.release()
    wlock.release()
Example #10
repo.commit(text="commit1", date="0 0")

d = repo[None]["foo"].date()
if os.name == "nt":
    d = d[:2]
print("workingfilectx.date = (%d, %d)" % d)

# test memctx with non-ASCII commit message


def filectxfn(repo, memctx, path):
    return context.memfilectx(repo, memctx, "foo", b"")


ctx = context.memctx(
    repo, ["tip", None], encoding.tolocal("Gr\xc3\xbcezi!"), ["foo"], filectxfn
)
ctx.commit()

# test performing a status


def getfilectx(repo, memctx, f):
    fctx = memctx.parents()[0][f]
    data, flags = fctx.data(), fctx.flags()
    if f == "foo":
        data += b"bar\n"
    return context.memfilectx(repo, memctx, f, data, "l" in flags, "x" in flags)


ctxa = repo.changectx(0)
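
The snippet breaks off as the status test begins; a hedged continuation (an assumption, not the original test's code) would compare rev 0 against the tip produced by the memctx commit above:

# Hypothetical continuation of the truncated test.
ctxb = repo.changectx("tip")
print(ctxa.status(ctxb))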