Example #1
0
File: globalrevs.py  Project: simpkins/eden
def _sqllocalrepowrapper(orig, repo):
    """Wrap the hgsql repo setup to add global revision numbers.

    Runs the original wrapper (`orig`) first, then — if the globalrevs
    extension is enabled — dynamically subclasses the repo so that every
    commit gets a strictly increasing global revision number backed by
    the hgsql `revision_references` SQL table.
    """
    # This ensures that the repo is of type `sqllocalrepo` which is defined in
    # hgsql extension.
    orig(repo)

    # Nothing to do unless globalrevs is active for this repo.
    if not extensions.isenabled(repo.ui, "globalrevs"):
        return

    # This class will effectively extend the `sqllocalrepo` class.
    class globalrevsrepo(repo.__class__):
        def commitctx(self, ctx, error=False):
            # Assign global revs automatically
            extra = dict(ctx.extra())
            extra[EXTRASGLOBALREVKEY] = str(self.nextrevisionnumber())
            # Replace the context's extra() accessor so the new key is
            # visible to the base-class commit machinery.
            ctx.extra = lambda: extra
            return super(globalrevsrepo, self).commitctx(ctx, error)

        def revisionnumberfromdb(self):
            """Read the current global-rev counter for this repo from SQL.

            Raises error.Abort (wrapping a CorruptionException) if the
            counter row is missing or duplicated.
            """
            # This must be executed while the SQL lock is taken
            if not self.hassqlwritelock():
                raise error.ProgrammingError("acquiring globalrev needs SQL write lock")

            reponame = self._globalrevsreponame
            cursor = self.sqlcursor

            cursor.execute(
                "SELECT value FROM revision_references "
                + "WHERE repo = %s AND "
                + "namespace = 'counter' AND "
                + "name='commit' ",
                (reponame,),
            )

            counterresults = cursor.fetchall()
            if len(counterresults) == 1:
                return int(counterresults[0][0])
            elif len(counterresults) == 0:
                raise error.Abort(
                    CorruptionException(
                        _("no commit counters for %s in database") % reponame
                    )
                )
            else:
                raise error.Abort(
                    CorruptionException(
                        _("multiple commit counters for %s in database") % reponame
                    )
                )

        def nextrevisionnumber(self):
            """get the next strictly increasing revision number for this
            repository.
            """

            # Lazily load the counter from the database on first use;
            # subsequent calls increment the in-memory copy.
            if self._nextrevisionnumber is None:
                self._nextrevisionnumber = self.revisionnumberfromdb()

            nextrev = self._nextrevisionnumber
            self._nextrevisionnumber += 1
            return nextrev

        def transaction(self, *args, **kwargs):
            # Hook the transaction so an abort discards the cached counter,
            # forcing a fresh read from the database next time.
            tr = super(globalrevsrepo, self).transaction(*args, **kwargs)
            if tr.count > 1:
                # Nested transaction: the outermost one already hooked _abort.
                return tr

            def transactionabort(orig):
                self._nextrevisionnumber = None
                return orig()

            extensions.wrapfunction(tr, "_abort", transactionabort)
            return tr

        def _updaterevisionreferences(self, *args, **kwargs):
            # Persist the in-memory counter back to SQL alongside the other
            # revision references written by the base class.
            super(globalrevsrepo, self)._updaterevisionreferences(*args, **kwargs)

            newcount = self._nextrevisionnumber

            # Only write to database if the global revision number actually
            # changed.
            if newcount is not None:
                reponame = self._globalrevsreponame
                cursor = self.sqlcursor

                cursor.execute(
                    "UPDATE revision_references "
                    + "SET value=%s "
                    + "WHERE repo=%s AND namespace='counter' AND name='commit'",
                    (newcount, reponame),
                )

    # Counter rows are keyed by repo name; prefer the configured name and
    # fall back to the hgsql repo name.
    repo._globalrevsreponame = (
        repo.ui.config("globalrevs", "reponame") or repo.sqlreponame
    )
    repo._nextrevisionnumber = None
    # Swap in the subclass so the overrides above take effect on this repo.
    repo.__class__ = globalrevsrepo
Example #2
0
File: crdump.py  Project: leszfb/eden
def crdump(ui, repo, *revs, **opts):
    """
    Dump the info about the revisions in format that's friendly for sending the
    patches for code review.

    The output is a JSON list with dictionary for each specified revision: ::

        {
          "output_directory": an output directory for all temporary files
          "commits": [
          {
            "node": commit hash,
            "date": date in format [unixtime, timezone offset],
            "desc": commit message,
            "patch_file": path to file containing patch in unified diff format
                          relative to output_directory,
            "commit_cloud": true if the commit is in commit cloud,
            "files": list of files touched by commit,
            "binary_files": [
              {
                "filename": path to file relative to repo root,
                "old_file": path to file (relative to output_directory) with
                            a dump of the old version of the file,
                "new_file": path to file (relative to output_directory) with
                            a dump of the new version of the file,
              },
              ...
            ],
            "user": commit author,
            "p1": {
              "node": hash,
              "differential_revision": xxxx
            },
            "public_base": {
              "node": public base commit hash,
              "svnrev": svn revision of public base (if hgsvn repo),
            },
            "obsolete": {
                "date": [
                    time,
                    timezone
                ],
                "flag": marker's flags,
                "metadata": {
                    "operation": changes made,
                    "user": user name
                },
                "prednode": predecessor commit in hash,
                "succnodes": [
                    successors in hash
                ]
            }
          },
          ...
          ]
        }
    """

    # Revisions may come positionally and/or via --rev; merge both.
    revs = list(revs)
    revs.extend(opts["rev"])

    if not revs:
        raise error.Abort(_("revisions must be specified"))
    revs = scmutil.revrange(repo, revs)

    # Number of context lines for the unified diff. Using .get() instead of
    # a conditional assignment avoids an unbound-name error later if the
    # "unified" option is ever absent from opts.
    contextlines = opts.get("unified")

    cdata = []
    # The caller is responsible for the output directory on success; it is
    # only removed below if dumping fails.
    outdir = tempfile.mkdtemp(suffix="hg.crdump")
    try:
        lfs = None
        if opts["lfs"]:
            try:
                lfs = extensions.find("lfs")
            except KeyError:
                pass  # lfs extension is not enabled

        # Assume nothing is backed up unless commit cloud tells us otherwise.
        notbackedup = set(repo[rev].node() for rev in revs)
        if ui.configbool("crdump", "commitcloud", False):
            # Save the quiet flag *before* the try so the finally clause can
            # always restore it, even if backup setup raises immediately.
            oldquiet = repo.ui.quiet
            try:
                # Silence any output from commitcloud
                repo.ui.quiet = True
                notbackedup = commitcloud.backup.backup(repo, revs)[1]
            except Exception:
                # Don't let commit cloud exceptions block crdump
                pass
            finally:
                repo.ui.quiet = oldquiet

        for rev in revs:
            ctx = repo[rev]
            rdata = {
                "node": hex(ctx.node()),
                "date": list(map(int, ctx.date())),
                "desc": encoding.fromlocal(ctx.description()),
                "files": ctx.files(),
                "p1": {
                    "node": ctx.parents()[0].hex()
                },
                "user": encoding.fromlocal(ctx.user()),
                "bookmarks": list(map(encoding.fromlocal, ctx.bookmarks())),
                "commit_cloud": False if ctx.node() in notbackedup else True,
                "manifest_node": hex(ctx.manifestnode()),
            }
            if ctx.parents()[0].phase() != phases.public:
                # we need this only if parent is in the same draft stack
                rdata["p1"]["differential_revision"] = phabricatorrevision(
                    ctx.parents()[0])

            if opts["obsolete"]:
                markers = obsutil.getmarkers(repo, [ctx.node()])
                obsolete = dumpmarkers(markers)
                if obsolete:
                    rdata["obsolete"] = obsolete

            rdata["branch"] = ""

            pbctx = publicbase(repo, ctx)
            if pbctx:
                rdata["public_base"] = {"node": hex(pbctx.node())}
                try:
                    # Global rev doubles as the svn revision when available.
                    globalrevs = extensions.find("globalrevs")
                    globalrev = globalrevs.getglobalrev(ui, pbctx)
                    rdata["public_base"]["svnrev"] = globalrev
                except KeyError:
                    pass  # globalrevs extension is not enabled

                if extensions.isenabled(ui, "remotenames"):
                    downstreams = repo.revs("%n:: & remotebookmark()",
                                            pbctx.node())
                    downstreambookmarks = set()
                    for r in downstreams:
                        downstreambookmarks.update(
                            repo.names["hoistednames"].names(
                                repo, repo[r].node()))

                    # If there's a single downstream remotebookmark, or master is a
                    # downstream remotebookmark, report it as the current branch.
                    if downstreambookmarks:
                        if "master" in downstreambookmarks:
                            rdata["branch"] = "master"
                        elif len(downstreambookmarks) == 1:
                            rdata["branch"] = list(downstreambookmarks)[0]

            rdata["patch_file"] = dumppatch(ui, repo, ctx, outdir,
                                            contextlines)
            if not opts["nobinary"]:
                rdata["binary_files"] = dumpbinaryfiles(
                    ui, repo, ctx, outdir, lfs)
            cdata.append(rdata)

        ui.write(
            json.dumps(
                {
                    "output_directory": outdir,
                    "commits": cdata
                },
                sort_keys=True,
                indent=4,
                separators=(",", ": "),
            ))
        ui.write("\n")
    except Exception:
        # Clean up the temp directory only on failure; on success the caller
        # consumes the files referenced by the JSON output.
        shutil.rmtree(outdir)
        raise