Example #1
def _getamendcopies(repo, dest, ancestor):
    db, error = opendbm(repo, "r")
    if db is None:
        return {}
    try:
        ctx = dest
        count = 0
        limit = repo.ui.configint("copytrace", "amendcopytracecommitlimit")

        # Search for the ancestor commit that has amend copytrace data.  This
        # will be the most recent amend commit if we are rebasing onto an
        # amend commit.  If we reach the common ancestor or a public commit,
        # then there is no amend copytrace data to be found.
        while ctx.node() not in db:
            ctx = ctx.p1()
            count += 1
            if ctx == ancestor or count > limit:
                return {}

        # Load the amend copytrace data from this commit.
        encoded = json.loads(db[ctx.node()])
        return dict((k.decode("base64"), v.decode("base64"))
                    for (k, v) in pycompat.iteritems(encoded))
    except Exception:
        repo.ui.log("copytrace",
                    "Failed to load amend copytrace for %s" % dest.hex())
        return {}
    finally:
        try:
            db.close()
        except error:
            pass
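
For reference, each dbm value read above is a JSON object whose keys and values are base64-encoded path names mapping destination to source. A minimal standalone sketch of that decoding step (the sample value below is made up, and base64.b64decode stands in for the Python 2 "base64" codec used above):

import base64
import json

# Hypothetical raw dbm value: JSON mapping base64(dest path) -> base64(source path).
raw = json.dumps({
    base64.b64encode(b"dir/new.py").decode(): base64.b64encode(b"dir/old.py").decode(),
})

# Decode back into a plain {dest: source} copy map, as _getamendcopies does.
copies = {
    base64.b64decode(k).decode(): base64.b64decode(v).decode()
    for k, v in json.loads(raw).items()
}
assert copies == {"dir/new.py": "dir/old.py"}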
Example #2
    def _batchrequest(self, pointers, action):
        """Get metadata about objects pointed by pointers for given action

        Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
        See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
        """
        self.ui.log("lfs_url", lfs_url=self.baseurl)
        objects = [{"oid": p.oid(), "size": p.size()} for p in pointers]
        requestdata = pycompat.encodeutf8(
            json.dumps({
                "objects": objects,
                "operation": action
            }))
        batchreq = util.urlreq.request("%s/objects/batch" % self.baseurl,
                                       data=requestdata)
        batchreq.add_header("Accept", "application/vnd.git-lfs+json")
        batchreq.add_header("Content-Type", "application/vnd.git-lfs+json")
        self._addextraheaders(batchreq)
        try:
            res = self.urlopener.open(batchreq)
            server = res.info().get("server")
            self.ui.log("lfs_server", lfs_server=server)
            rawjson = res.read()
        except util.urlerr.httperror as ex:
            raise LfsRemoteError(
                _("LFS HTTP error: %s (action=%s)") % (ex, action))
        try:
            response = json.loads(rawjson)
        except ValueError:
            raise LfsRemoteError(
                _("LFS server returns invalid JSON: %s") % rawjson)
        return response
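
To make the wire format concrete, here is the request body _batchrequest assembles for a hypothetical "download" action, along with a hand-written response of the general shape the batch endpoint returns (see the linked git-lfs API doc; the oid and URL are invented):

import json

# Request payload as built above, for one pointer.
requestdata = json.dumps({
    "objects": [{"oid": "1111aaaa", "size": 123}],
    "operation": "download",
})

# Illustrative response shape; only "objects" is relied on by callers here.
sample_response = json.loads("""
{
  "objects": [
    {"oid": "1111aaaa", "size": 123,
     "actions": {"download": {"href": "https://lfs.example.com/objects/1111aaaa"}}}
  ]
}
""")
assert sample_response["objects"][0]["oid"] == "1111aaaa"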
Example #3
    def _makereferences(self, data):
        """Makes a References object from JSON data

            JSON data must represent json serialization of
            //scm/commitcloud/if/CommitCloudService.thrift
            struct ReferencesData

            Result represents struct References from this module
        """
        version = data["version"]
        newheads = [h for h in data["heads"]]
        newbookmarks = {n: v for n, v in data["bookmarks"].items()}
        newobsmarkers = [(
            nodemod.bin(m["pred"]),
            tuple(nodemod.bin(s) for s in m["succs"]),
            m["flags"],
            tuple((k, v) for k, v in json.loads(m["meta"])),
            (m["date"], m["tz"]),
            tuple(nodemod.bin(p) for p in m["predparents"]),
        ) for m in data["new_obsmarkers_data"]]
        headdates = {h: d for h, d in data.get("head_dates", {}).items()}
        newremotebookmarks = self._decoderemotebookmarks(
            data.get("remote_bookmarks", []))
        newsnapshots = [s for s in data["snapshots"]]

        return References(
            version,
            newheads,
            newbookmarks,
            newobsmarkers,
            headdates,
            newremotebookmarks,
            newsnapshots,
        )
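
An illustrative (hand-written) ReferencesData payload covering the keys this method reads; hashes and values are placeholders:

# Decoded JSON payload with the fields _makereferences expects.
data = {
    "version": 42,
    "heads": ["aa" * 20],                 # 40-char hex commit hashes
    "bookmarks": {"master": "aa" * 20},
    "new_obsmarkers_data": [],            # entries carry pred/succs/flags/meta/date/tz/predparents
    "head_dates": {"aa" * 20: 1500000000},
    "remote_bookmarks": [],
    "snapshots": [],
}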
Example #4
def _getprogressstep(repo):
    try:
        data = json.loads(repo.sharedvfs.open(progressfilename).read())
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
    else:
        return data.get("step")
Example #5
def decodebookmarks(stream):
    sizeofjsonsize = struct.calcsize(">i")
    size = struct.unpack(">i", stream.read(sizeofjsonsize))[0]
    unicodedict = json.loads(stream.read(size))
    # The json module always returns unicode strings. Copy them into a plain
    # dict keyed by bookmark name.
    result = {}
    for bookmark, node in pycompat.iteritems(unicodedict):
        result[bookmark] = node
    return result
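
The stream format is a 4-byte big-endian length prefix followed by that many bytes of JSON. A minimal round trip with a hypothetical encodebookmarks counterpart (not part of this listing):

import io
import json
import struct

def encodebookmarks(bookmarks):
    # Hypothetical counterpart: length-prefixed JSON, matching decodebookmarks.
    body = json.dumps(bookmarks).encode("utf-8")
    return struct.pack(">i", len(body)) + body

stream = io.BytesIO(encodebookmarks({"master": "aa" * 20}))
size = struct.unpack(">i", stream.read(struct.calcsize(">i")))[0]
assert json.loads(stream.read(size)) == {"master": "aa" * 20}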
Example #6
    def sendpost(self, request_url, data, timeout, ca_bundle):
        res = requests.post(request_url, data, verify=ca_bundle or True)
        data = json.loads(res.content)
        if res.status_code != 200:
            raise PhabricatorClientError(
                "Phabricator not available, returned " + str(res.status_code),
                res)
        # Apparently both singular and plural are used.
        if "error" in data:
            raise PhabricatorClientError("Error in query", data["error"])
        if "errors" in data:
            raise PhabricatorClientError("Error in query", data["errors"])
        return data
Example #7
def _executeandparse(ui, repo, target=None):
    stdout, stderr = _execute(ui, repo, target)

    # The stderr can optionally provide useful context, so print it.
    ui.write_err(stderr)

    try:
        # Prefer JSON output first.
        data = json.loads(stdout)
        if "node" in data:
            return _validaterevspec(ui, data["node"])
    except Exception:
        pass

    # Fall back to stdout:
    return _validaterevspec(ui, stdout.strip())
Example #8
def _lookupstables(repo, ctx):
    ui = repo.ui

    stablesscript = ui.config("stablerev", "stablesscript")
    if stablesscript is None:
        raise error.ConfigError(_("must set stablerev.stablesscript"))

    stablesscript = stablesscript.format(nodeid=util.shellquote(ctx.hex()))

    stdout = _executescript(stablesscript, repo)

    try:
        committostables = json.loads(stdout)
    except Exception as e:
        raise error.Abort(
            _("couldn't parse stablesscript stdout as json: %s") % e)

    return committostables.get(ctx.hex(), [])
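
The configured script is expected to print a JSON object mapping a commit hash to its list of stable identifiers; the caller then picks the entry for ctx.hex(). A hand-written illustration of acceptable stdout (hash and names are invented):

import json

# What a stablerev.stablesscript run might print for the commit passed as {nodeid}.
stdout = json.dumps({
    "aa" * 20: ["stable-build-1234", "stable-build-1235"],
})

committostables = json.loads(stdout)
print(committostables.get("aa" * 20, []))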
Example #9
def _executeandparse(ui, repo, target=None):
    script = ui.config("stablerev", "script")
    if script is None:
        raise error.ConfigError(_("must set stablerev.script"))

    # Pass '--target $TARGET' for compatibility.
    # XXX: Remove this once the new code has been rolled out for some time.
    env = {}
    if target is not None:
        script += " --target %s" % util.shellquote(target)
        env["TARGET"] = target

    stdout = _executescript(script, repo, env)

    try:
        # Prefer JSON output first.
        data = json.loads(stdout)
        if "node" in data:
            return _validaterevspec(ui, data["node"])
    except Exception:
        pass

    # Fall back to stdout:
    return _validaterevspec(ui, stdout.strip())
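
The two stdout conventions this accepts, shown with made-up values, in a standalone sketch of the same fallback logic (validation omitted):

import json

def parse_script_output(stdout):
    # Prefer a JSON object carrying a "node" field; otherwise treat the raw
    # stdout as a revspec, mirroring _executeandparse above.
    try:
        data = json.loads(stdout)
        if isinstance(data, dict) and "node" in data:
            return data["node"]
    except ValueError:
        pass
    return stdout.strip()

print(parse_script_output(json.dumps({"node": "aa" * 20})))  # JSON form
print(parse_script_output("remote/master\n"))                # bare revspec form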
Example #10
def revsetdiff(repo, diffid):
    """Return a set of revisions corresponding to a given Differential ID """

    repo_callsign = repo.ui.config("phrevset", "callsign")
    if repo_callsign is None:
        msg = _("phrevset.callsign is not set - doing a linear search\n")
        hint = _("This will be slow if the diff was not committed recently\n")
        repo.ui.warn(msg)
        repo.ui.warn(hint)
        rev = finddiff(repo, diffid)
        if rev is None:
            raise error.Abort("Could not find diff D%s in changelog" % diffid)
        else:
            return [rev]

    revs, resp = forksearch(repo, diffid)

    if revs is not None:
        # The log walk found the diff, nothing more to do
        return revs

    if resp is None:
        # The graphql query finished but didn't return anything
        return []

    vcs = resp["source_control_system"]

    repo.ui.debug("[diffrev] VCS is %s\n" % vcs)

    if vcs == "git":
        gitrev = parsedesc(repo, resp, ignoreparsefailure=False)
        repo.ui.debug("[diffrev] GIT rev is %s\n" % gitrev)

        peerpath = repo.ui.expandpath("default")
        remoterepo = hg.peer(repo, {}, peerpath)
        remoterev = remoterepo.lookup("_gitlookup_git_%s" % gitrev)

        repo.ui.debug("[diffrev] HG rev is %s\n" % remoterev.encode("hex"))
        if not remoterev:
            repo.ui.debug("[diffrev] Falling back to linear search\n")
            linear_search_result = finddiff(repo, diffid)
            if linear_search_result is None:
                # walked the entire repo and couldn't find the diff
                raise error.Abort("Could not find diff D%s in changelog" % diffid)

            return [linear_search_result]

        return [repo[remoterev].rev()]

    elif vcs == "hg":
        rev = parsedesc(repo, resp, ignoreparsefailure=True)
        if rev:
            # The response from phabricator contains a changeset ID.
            # Convert it back to a rev number.
            try:
                return [repo[rev].rev()]
            except error.RepoLookupError:
                # TODO: 's/svnrev/globalrev' after turning off Subversion
                # servers. We will know about this when we remove the `svnrev`
                # revset.
                #
                # Unfortunately the rev can also be a svnrev/globalrev :(.
                if rev.isdigit():
                    try:
                        return [r for r in repo.revs("svnrev(%s)" % rev)]
                    except error.RepoLookupError:
                        pass

                raise error.Abort(
                    "Landed commit for diff D%s not available "
                    'in current repository: run "hg pull" '
                    "to retrieve it" % diffid
                )

        # commit is still local, get its hash

        props = resp["phabricator_version_properties"]["edges"]
        commits = {}
        for prop in props:
            if prop["node"]["property_name"] == "local:commits":
                commits = json.loads(prop["node"]["property_value"])

        revs = [c["commit"] for c in commits.values()]

        # verify all revisions exist in the current repo; if not, try to
        # find their counterpart by parsing the log
        results = set()
        for rev in revs:
            try:
                unfiltered = repo.unfiltered()
                node = unfiltered[rev]
            except error.RepoLookupError:
                raise error.Abort(
                    "Commit %s corresponding to D%s\n not found in the repo"
                    % (rev, diffid)
                )
            successors = list(repo.revs("last(successors(%n))", node.node()))
            if len(successors) != 1:
                results.add(node.rev())
            else:
                results.add(successors[0])

        if not results:
            raise error.Abort("Could not find local commit for D%s" % diffid)

        return set(results)

    else:
        if not vcs:
            msg = (
                "D%s does not have an associated version control system\n"
                "You can view the diff at https:///our.internmc.facebook.com/intern/diff/D%s\n"
            )
            repo.ui.warn(msg % (diffid, diffid))

            return []
        else:
            raise error.Abort(
                "Conduit returned unknown " 'sourceControlSystem "%s"' % vcs
            )
Example #11
def _amend(orig, ui, repo, old, extra, pats, opts):
    """Wraps amend to collect copytrace data on amend

    If a file is created in one commit, modified in a subsequent commit, and
    then renamed or copied by amending the original commit, restacking the
    commits that modify the file will fail:

    file modified here    B     B'  restack of B to B' will fail
                          |     :
    file created here     A --> A'  file renamed in amended commit
                          |    /
                          o --

    This function collects information about copies and renames from amend
    commits, and saves it for use during rebases onto the amend commit.  This
    lets rebases involving files that have been renamed or copied in an amend
    commit work without conflicts.

    This function collects the copytrace information from the working copy and
    stores it against the amended commit in a separate dbm file. Later,
    in _domergecopies, this information will be merged with the rebase
    copytrace data to incorporate renames and copies made during the amend.
    """

    # Check if amend copytracing has been disabled.
    if not ui.configbool("copytrace", "enableamendcopytrace"):
        return orig(ui, repo, old, extra, pats, opts)

    # Need to get the amend-copies before calling the command because files from
    # the working copy will be used during the amend.
    wctx = repo[None]

    # Find the amend-copies.
    matcher = scmutil.match(wctx, pats, opts)
    amend_copies = copiesmod.pathcopies(old, wctx, matcher)

    # Finally, invoke the command.
    node = orig(ui, repo, old, extra, pats, opts)
    amended_ctx = repo[node]

    # Store the amend-copies against the amended context.
    if amend_copies:
        db, error = opendbm(repo, "c")
        if db is None:
            # Database locked, can't record these amend-copies.
            ui.log("copytrace", "Failed to open amendcopytrace db: %s" % error)
            return node

        # Merge in any existing amend copies from any previous amends.
        try:
            orig_data = db[old.node()]
        except KeyError:
            orig_data = "{}"
        except error as e:
            ui.log(
                "copytrace",
                "Failed to read key %s from amendcopytrace db: %s" %
                (old.hex(), e),
            )
            return node

        orig_encoded = json.loads(orig_data)
        orig_amend_copies = dict(
            (
                pycompat.decodeutf8(
                    codecs.decode(pycompat.encodeutf8(k), "base64")),
                pycompat.decodeutf8(
                    codecs.decode(pycompat.encodeutf8(v), "base64")),
            ) for (k, v) in pycompat.iteritems(orig_encoded))

        # Copytrace information is not valid if it refers to a file that
        # doesn't exist in a commit.  We need to update or remove entries
        # that refer to files that might have only existed in the previous
        # amend commit.
        #
        # Find chained copies and renames (a -> b -> c) and collapse them to
        # (a -> c).  Delete the entry for b if this was a rename.
        for dst, src in pycompat.iteritems(amend_copies):
            if src in orig_amend_copies:
                amend_copies[dst] = orig_amend_copies[src]
                if src not in amended_ctx:
                    del orig_amend_copies[src]

        # Copy any left over copies from the previous context.
        for dst, src in pycompat.iteritems(orig_amend_copies):
            if dst not in amend_copies:
                amend_copies[dst] = src

        # Write out the entry for the new amend commit.
        encoded = dict(
            (
                pycompat.decodeutf8(
                    codecs.encode(pycompat.encodeutf8(k), "base64")),
                pycompat.decodeutf8(
                    codecs.encode(pycompat.encodeutf8(v), "base64")),
            ) for (k, v) in pycompat.iteritems(amend_copies))
        db[node] = json.dumps(encoded)
        try:
            db.close()
        except Exception as e:
            # Database corruption.  Not much we can do, so just log.
            ui.log("copytrace", "Failed to close amendcopytrace db: %s" % e)

    return node
Example #12
def metaedit(ui, repo, templ, *revs, **opts):
    """edit commit message and other metadata

    Edit commit message for the current commit. By default, opens your default
    editor so that you can edit the commit message interactively. Specify -m
    to specify the commit message on the command line.

    To edit the message for a different commit, specify -r. To edit the
    messages of multiple commits, specify --batch.

    You can edit other pieces of commit metadata, namely the user or date,
    by specifying -u or -d, respectively. The expected format for user is
    'Full Name <*****@*****.**>'.

    There is also an automation-friendly JSON input mode which allows the
    caller to provide a mapping from commit hash to new message and username
    in the following format:

        {
            "<commit_hash>": {
                "message": "<message>",
                "user": "******" // optional
            }
        }

    .. note::

        You can specify --fold to fold multiple revisions into one when the
        given revisions form a linear unbroken chain. However, :hg:`fold` is
        the preferred command for this purpose. See :hg:`help fold` for more
        information.

    .. container:: verbose

     Some examples:

     - Edit the commit message for the current commit::

         hg metaedit

     - Change the username for the current commit::

         hg metaedit --user 'New User <*****@*****.**>'

    """
    revs = list(revs)
    revs.extend(opts["rev"])
    if not revs:
        if opts["fold"]:
            raise error.Abort(_("revisions must be specified with --fold"))
        revs = ["."]

    with repo.wlock(), repo.lock():
        revs = scmutil.revrange(repo, revs)
        msgmap = {
        }  # {node: message}, predefined messages, currently used by --batch
        usermap = {
        }  # {node: author}, predefined authors, used by --jsoninputfile

        if opts["fold"]:
            root, head = fold._foldcheck(repo, revs)
        else:
            if repo.revs("%ld and public()", revs):
                raise error.Abort(
                    _("cannot edit commit information for public "
                      "revisions"))
            root = head = repo[revs.first()]

        wctx = repo[None]
        p1 = wctx.p1()
        tr = repo.transaction("metaedit")
        newp1 = None
        try:
            commitopts = opts.copy()
            allctx = [repo[r] for r in revs]
            jsoninputfile = None

            if any(
                    commitopts.get(name)
                    for name in ["message", "logfile", "reuse_message"]):
                commitopts["edit"] = False
            else:
                if opts["fold"]:
                    msgs = [
                        _("HG: This is a fold of %d changesets.") % len(allctx)
                    ]
                    msgs += [
                        _("HG: Commit message of %s.\n\n%s\n") %
                        (nodemod.short(c.node()), c.description())
                        for c in allctx
                    ]
                else:
                    if opts["batch"] and len(revs) > 1:
                        msgmap = editmessages(repo, revs)

                    msgs = [head.description()]
                    jsoninputfile = opts.get("json_input_file")
                    if jsoninputfile:
                        try:
                            if cmdutil.isstdiofilename(jsoninputfile):
                                inputjson = pycompat.decodeutf8(ui.fin.read())
                            else:
                                inputjson = pycompat.decodeutf8(
                                    util.readfile(jsoninputfile))
                            msgusermap = json.loads(inputjson)
                        except IOError as inst:
                            raise error.Abort(
                                _("can't read JSON input file '%s': %s") %
                                (jsoninputfile,
                                 encoding.strtolocal(inst.strerror)))
                        except ValueError as inst:
                            raise error.Abort(
                                _("can't decode JSON input file '%s': %s") %
                                (jsoninputfile, str(inst)))

                        if not isinstance(msgusermap, dict):
                            raise error.Abort(
                                _("JSON input is not a dictionary (see --help for input format)"
                                  ))

                        try:
                            msgmap = {
                                bin(node): msguser.get("message")
                                for (node, msguser) in msgusermap.items()
                                if "message" in msguser
                            }

                            usermap = {
                                bin(node): msguser.get("user")
                                for (node, msguser) in msgusermap.items()
                                if "user" in msguser
                            }
                        except TypeError:
                            raise error.Abort(_("invalid JSON input"))

                commitopts["message"] = "\n".join(msgs)
                commitopts["edit"] = True

            if root == head:
                # fast path: use metarewrite
                replacemap = {}
                # adding commitopts to the revisions to metaedit
                allctxopt = [{
                    "ctx": ctx,
                    "commitopts": commitopts
                } for ctx in allctx]
                # all descendants that can be safely rewritten
                newunstable = common.newunstable(repo, revs)
                newunstableopt = [{
                    "ctx": ctx
                } for ctx in [repo[r] for r in newunstable]]
                # we need to edit descendants along with the given revisions so
                # as not to corrupt the stacks
                if _histediting(repo):
                    ui.note(
                        _("during histedit, the descendants of "
                          "the edited commit weren't auto-rebased\n"))
                else:
                    allctxopt += newunstableopt
                # we need topological order for all
                if mutation.enabled(repo):
                    allctxopt = mutation.toposort(
                        repo,
                        allctxopt,
                        nodefn=lambda copt: copt["ctx"].node())
                else:
                    allctxopt = sorted(allctxopt,
                                       key=lambda copt: copt["ctx"].rev())

                def _rewritesingle(c, _commitopts):
                    # Predefined message overrides other message editing choices.
                    msg = msgmap.get(c.node())
                    if jsoninputfile:
                        _commitopts["edit"] = False
                    if msg is not None:
                        _commitopts["message"] = msg
                        _commitopts["edit"] = False
                    user = usermap.get(c.node())
                    if user is not None:
                        _commitopts["user"] = user
                    if _commitopts.get("edit", False):
                        msg = "HG: Commit message of changeset %s\n%s" % (
                            str(c),
                            c.description(),
                        )
                        _commitopts["message"] = msg
                    bases = [
                        replacemap.get(c.p1().node(),
                                       c.p1().node()),
                        replacemap.get(c.p2().node(),
                                       c.p2().node()),
                    ]
                    newid, created = common.metarewrite(repo,
                                                        c,
                                                        bases,
                                                        commitopts=_commitopts)
                    if created:
                        replacemap[c.node()] = newid

                for copt in allctxopt:
                    _rewritesingle(
                        copt["ctx"],
                        copt.get("commitopts",
                                 {"date": commitopts.get("date") or None}),
                    )

                if p1.node() in replacemap:
                    repo.setparents(replacemap[p1.node()])
                if len(replacemap) > 0:
                    mapping = dict(
                        map(
                            lambda oldnew: (oldnew[0], [oldnew[1]]),
                            pycompat.iteritems(replacemap),
                        ))
                    templ.setprop("nodereplacements", mapping)
                    scmutil.cleanupnodes(repo, mapping, "metaedit")
                    # TODO: set proper phase boundaries (affects secret
                    # phase only)
                else:
                    ui.status(_("nothing changed\n"))
                    return 1
            else:
                # slow path: create a new commit
                targetphase = max(c.phase() for c in allctx)

                # TODO: if the author and message are the same, don't create a
                # new hash. Right now we create a new hash because the date can
                # be different.
                newid, created = common.rewrite(
                    repo,
                    root,
                    allctx,
                    head,
                    [root.p1().node(), root.p2().node()],
                    commitopts=commitopts,
                    mutop="metaedit",
                )
                if created:
                    if p1.rev() in revs:
                        newp1 = newid
                    phases.retractboundary(repo, tr, targetphase, [newid])
                    mapping = dict([(repo[rev].node(), [newid])
                                    for rev in revs])
                    templ.setprop("nodereplacements", mapping)
                    scmutil.cleanupnodes(repo, mapping, "metaedit")
                else:
                    ui.status(_("nothing changed\n"))
                    return 1
            tr.close()
        finally:
            tr.release()

        if opts["fold"]:
            ui.status(_("%i changesets folded\n") % len(revs))
        if newp1 is not None:
            hg.update(repo, newp1)
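
A sketch of driving the JSON input mode non-interactively. The payload follows the format in the docstring; the commit hash and email are placeholders, and the --json-input-file flag name is an assumption inferred from the json_input_file option (a value of '-' reads from stdin per isstdiofilename):

import json

# Hypothetical input file for metaedit's JSON mode.
payload = {
    "aa" * 20: {
        "message": "new commit message",
        "user": "New User <user@example.com>",
    }
}
with open("metaedit.json", "w") as f:
    json.dump(payload, f)

# Then, roughly:
#   hg metaedit --json-input-file metaedit.json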
Example #13
    def deserialize(cls, string):
        return json.loads(pycompat.decodeutf8(string))
Example #14
    def deserialize(cls, string):
        return json.loads(string)
Example #15
    def fromjson(cls, jsonstr):
        d = json.loads(jsonstr)
        return cls._fromdict(d)
Example #16
def diffidtonode(repo, diffid):
    """Return node that matches a given Differential ID or None.

    The node might exist or not exist in the repo.
    This function does not raise.
    """

    repo_callsigns = repo.ui.configlist("phrevset", "callsign")
    if not repo_callsigns:
        msg = _("phrevset.callsign is not set - doing a linear search\n")
        hint = _("This will be slow if the diff was not committed recently\n")
        repo.ui.warn(msg)
        repo.ui.warn(hint)
        node = localgetdiff(repo, diffid)
        if node is None:
            repo.ui.warn(_("Could not find diff D%s in changelog\n") % diffid)
        return node

    node, resp = search(repo, diffid)

    if node is not None:
        # The log walk found the diff, nothing more to do
        return node

    if resp is None:
        # The graphql query finished but didn't return anything
        return None

    vcs = resp.get("source_control_system")
    localreponame = repo.ui.config("remotefilelog", "reponame")
    diffreponame = None
    repository = resp.get("repository")
    if repository is not None:
        diffreponame = repository.get("scm_name")
    if diffreponame in repo.ui.configlist("phrevset", "aliases"):
        diffreponame = localreponame

    if not util.istest() and (diffreponame != localreponame):
        raise error.Abort(
            "D%s is for repo '%s', not this repo ('%s')"
            % (diffid, diffreponame, localreponame)
        )

    repo.ui.debug("[diffrev] VCS is %s\n" % vcs)

    if vcs == "git":
        gitrev = parsedesc(repo, resp, ignoreparsefailure=False)
        repo.ui.debug("[diffrev] GIT rev is %s\n" % gitrev)

        peerpath = repo.ui.expandpath("default")
        remoterepo = hg.peer(repo, {}, peerpath)
        remoterev = remoterepo.lookup("_gitlookup_git_%s" % gitrev)

        repo.ui.debug("[diffrev] HG rev is %s\n" % hex(remoterev))
        if not remoterev:
            repo.ui.debug("[diffrev] Falling back to linear search\n")
            node = localgetdiff(repo, diffid)
            if node is None:
                repo.ui.warn(_("Could not find diff D%s in changelog\n") % diffid)

            return node

        return remoterev

    elif vcs == "hg":
        rev = parsedesc(repo, resp, ignoreparsefailure=True)
        if rev:
            # The response from phabricator contains a changeset ID.
            # Convert it back to a node.
            try:
                return repo[rev].node()
            except error.RepoLookupError:
                # TODO: 's/svnrev/globalrev' after turning off Subversion
                # servers. We will know about this when we remove the `svnrev`
                # revset.
                #
                # Unfortunately the rev can also be a svnrev/globalrev :(.
                if rev.isdigit():
                    try:
                        return list(repo.nodes("svnrev(%s)" % rev))[0]
                    except (IndexError, error.RepoLookupError):
                        pass

                if len(rev) == len(nullhex):
                    return bin(rev)
                else:
                    return None

        # commit is still local, get its hash

        try:
            props = resp["phabricator_version_properties"]["edges"]
            commits = {}
            for prop in props:
                if prop["node"]["property_name"] == "local:commits":
                    commits = json.loads(prop["node"]["property_value"])
            hexnodes = [c["commit"] for c in commits.values()]
        except (AttributeError, IndexError, KeyError):
            hexnodes = []

        # find a better alternative of the commit hash specified in
        # graphql response by looking up successors.
        for hexnode in hexnodes:
            if len(hexnode) != len(nullhex):
                continue

            node = bin(hexnode)
            unfi = repo
            if node in unfi:
                # Find a successor.
                successors = list(
                    unfi.nodes("last(successors(%n)-%n-obsolete())", node, node)
                )
                if successors:
                    return successors[0]
            return node

        # local:commits is empty
        return None

    else:
        if not vcs:
            msg = (
                "D%s does not have an associated version control system\n"
                "You can view the diff at https:///our.internmc.facebook.com/intern/diff/D%s\n"
            )
            repo.ui.warn(msg % (diffid, diffid))

            return None
        else:
            repo.ui.warn(
                _("Conduit returned unknown sourceControlSystem: '%s'\n") % vcs
            )

            return None
Example #17
    def _processrevisioninfo(self, ret):
        try:
            errormsg = None
            if "error" in ret:
                errormsg = ret["error"]
            if "errors" in ret:
                errormsg = ret["errors"][0]["message"]
            if errormsg is not None:
                raise ClientError(None, errormsg)
        except (KeyError, TypeError):
            pass

        infos = {}
        try:
            nodes = ret["data"]["query"][0]["results"]["nodes"]
            for node in nodes:
                info = {}
                infos[str(node["number"])] = info

                status = node["diff_status_name"]
                # GraphQL uses "Closed" but Conduit used "Committed" so let's
                # not change the naming
                if status == "Closed":
                    status = "Committed"
                info["status"] = status
                info["created"] = node["created_time"]
                info["updated"] = node["updated_time"]
                info["is_landing"] = node["is_landing"]
                info["land_job_status"] = node["land_job_status"]
                info["needs_final_review_status"] = node["needs_final_review_status"]

                info["signal_status"] = None
                if (
                    # signal_summary can be:
                    # 1. missing; 2. present, "None" (ex. D17868094)
                    node.get("signal_summary")
                    and "signals_status" in node["signal_summary"]
                ):
                    info["signal_status"] = (
                        node["signal_summary"]["signals_status"]
                        .title()
                        .replace("_", " ")
                    )

                active_diff = None
                if (
                    "latest_active_diff" in node
                    and node["latest_active_diff"] is not None
                ):
                    active_diff = node["latest_active_diff"]

                if (
                    "latest_publishable_draft_phabricator_version" in node
                    and node["latest_publishable_draft_phabricator_version"] is not None
                ):
                    active_diff = node["latest_publishable_draft_phabricator_version"]

                if active_diff is None:
                    continue

                info["count"] = node["differential_diffs"]["count"]

                localcommitnode = active_diff["local_commit_info"]["nodes"]
                if localcommitnode is not None and len(localcommitnode) == 1:
                    localcommits = json.loads(localcommitnode[0]["property_value"])

                    if not isinstance(localcommits, dict):
                        continue

                    localcommits = sorted(
                        localcommits.values(),
                        key=operator.itemgetter("time"),
                        reverse=True,
                    )
                    info["hash"] = localcommits[0].get("commit", None)

        except (AttributeError, KeyError, TypeError):
            raise ClientError(None, "Unexpected graphql response format")

        return infos
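
For orientation, a pared-down, hand-constructed response of the shape this parser walks; field names are taken from the accesses above and all values are invented:

import json

sample_ret = {
    "data": {
        "query": [{
            "results": {
                "nodes": [{
                    "number": 12345,
                    "diff_status_name": "Closed",          # reported as "Committed"
                    "created_time": 1500000000,
                    "updated_time": 1500000100,
                    "is_landing": False,
                    "land_job_status": None,
                    "needs_final_review_status": None,
                    "signal_summary": {"signals_status": "ALL_PASSED"},
                    "differential_diffs": {"count": 1},
                    "latest_active_diff": {
                        "local_commit_info": {
                            "nodes": [{
                                # property_value is itself JSON, keyed by hash,
                                # with at least "commit" and "time" per entry.
                                "property_value": json.dumps(
                                    {"aa" * 20: {"commit": "aa" * 20, "time": 1}}
                                ),
                            }],
                        },
                    },
                }],
            },
        }],
    },
}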