Example #1
    def _send(self, path, data):
        e = None
        rdata = None
        # print request if debugrequests and debug are both on
        if self.debugrequests:
            self.ui.debug("%s\n" % json.dumps(cleandict(data)))
        if self._getheader("Content-Encoding") == "gzip":
            buffer = util.stringio()
            with gzip.GzipFile(fileobj=buffer, mode="w") as compressed:
                compressed.write(pycompat.encodeutf8(json.dumps(data)))
                compressed.flush()
            rdata = buffer.getvalue()
        else:
            rdata = pycompat.encodeutf8(json.dumps(data))

        # exponential backoff here on failure, 1s, 2s, 4s, 8s, 16s etc
        sl = 1

        for attempt in range(MAX_CONNECT_RETRIES):
            try:
                self.connection.request("POST", path, rdata, self.headers)
                resp = self.connection.getresponse()
                if resp.status == httplib.UNAUTHORIZED:
                    raise ccerror.RegistrationError(self.ui, _("unauthorized client"))
                if resp.status == httplib.FORBIDDEN:
                    raise ccerror.RegistrationError(self.ui, _("forbidden client"))
                if resp.status == httplib.BAD_REQUEST:
                    raise ccerror.BadRequestError(self.ui, resp.reason)
                if resp.status != httplib.OK:
                    raise ccerror.ServiceError(
                        self.ui, "%d %s" % (resp.status, resp.reason)
                    )
                if resp.getheader("Content-Encoding") == "gzip":
                    resp = gzip.GzipFile(fileobj=util.stringio(resp.read()))
                data = json.load(resp)
                # print response if debugrequests and debug are both on
                if self.debugrequests:
                    self.ui.debug("%s\n" % json.dumps(cleandict(data)))
                if "error" in data:
                    raise ccerror.ServiceError(self.ui, data["error"])
                return data
            except httplib.HTTPException as ex:
                # Remember the error for the final ServiceError below; the
                # "as" binding is cleared when the except block ends (Python 3).
                e = ex
                self.connection.close()
                self.connection.connect()
            except (socket.timeout, socket.gaierror) as e:
                raise error.Abort(
                    _("network error: %s") % e, hint=_("check your network connection")
                )
            except socket.error as e:
                if "SSL" in str(e):
                    raise ccerror.TLSAccessError(self.ui, str(e))
                raise ccerror.ServiceError(self.ui, str(e))
            except ssl.CertificateError as e:
                raise ccerror.TLSAccessError(self.ui, str(e))
            time.sleep(sl)
            sl *= 2
        if e:
            raise ccerror.ServiceError(self.ui, str(e))
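The retry loop above reconnects on HTTP-level failures and sleeps with exponential backoff between attempts. A minimal, generic sketch of the same pattern (with_backoff, MAX_RETRIES, and the callback names are ours, not from this codebase):

import time

MAX_RETRIES = 5

def with_backoff(request, reconnect, transient=(OSError,)):
    # Call request(); on a transient error, reconnect and retry after
    # sleeping 1s, 2s, 4s, ... like _send above.
    delay = 1
    last = None
    for _attempt in range(MAX_RETRIES):
        try:
            return request()
        except transient as ex:
            last = ex
            reconnect()
        time.sleep(delay)
        delay *= 2
    raise RuntimeError("request failed after %d retries: %s" % (MAX_RETRIES, last))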
Example #2
def graphqlgetdiff(repo, diffid):
    """Resolves a phabricator Diff number to a commit hash of it's latest version"""
    if util.istest():
        hexnode = repo.ui.config("phrevset", "mock-D%s" % diffid)
        if hexnode:
            return {
                "source_control_system": "hg",
                "description": "Commit rCALLSIGN{}".format(hexnode),
                "phabricator_version_properties": {
                    "edges": [
                        {
                            "node": {
                                "property_name": "local:commits",
                                "property_value": json.dumps(
                                    {hexnode: {"commit": hexnode, "rev": hexnode}}
                                ),
                            }
                        }
                    ]
                },
            }
    timeout = repo.ui.configint("ssl", "timeout", 10)
    ca_certs = repo.ui.configpath("web", "cacerts")
    try:
        client = graphql.Client(
            repodir=pycompat.getcwd(), ca_bundle=ca_certs, repo=repo
        )
        return client.getdifflatestversion(timeout, diffid)
    except Exception as e:
        raise error.Abort(
            "Could not call phabricator graphql API: %s" % e,
            hint="perhaps you need to connect to the VPN or run 'jf auth'?",
        )
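A hypothetical test wiring for the mock path above: with util.istest() true, pointing phrevset.mock-D<id> at a known hash short-circuits the GraphQL call entirely (the Diff number and hash here are made up):

repo.ui.setconfig("phrevset", "mock-D1234", "a" * 40)
info = graphqlgetdiff(repo, "1234")
assert info["source_control_system"] == "hg"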
Example #3
    def getmirroredrev(self, fromrepo, fromtype, torepo, totype, rev, timeout=15):
        """Translate a single rev to another repo/type"""
        query = self._getmirroredrevsquery()
        params = {
            "params": {
                "caller_info": "hgext.exlib.phabricator.getmirroredrev",
                "from_repo": fromrepo,
                "from_scm_type": fromtype,
                "to_repo": torepo,
                "to_scm_type": totype,
                "revs": [rev],
            }
        }
        ret = self._client.query(timeout, query, json.dumps(params))
        self._raise_errors(ret)
        for pair in ret["data"]["query"]["rev_map"]:
            if pair["from_rev"] == rev:
                return pair["to_rev"]
        return ""
Example #4
    def getmirroredrevmap(self, repo, nodes, fromtype, totype, timeout=15):
        """Return a mapping {node: node}

        Example:

            getmirroredrevmap(repo, [gitnode1, gitnode2], "git", "hg")
            # => {gitnode1: hgnode1, gitnode2: hgnode2}
        """
        reponame = repo.ui.config("fbscmquery", "reponame")
        if not reponame:
            return {}

        fromenc, fromdec = _getencodedecodefromcommittype(fromtype)
        _toenc, todec = _getencodedecodefromcommittype(totype)

        query = self._getmirroredrevsquery()
        params = {
            "params": {
                "caller_info": "hgext.exlib.phabricator.getmirroredrevmap",
                "from_repo": reponame,
                "from_scm_type": fromtype,
                "to_repo": reponame,
                "to_scm_type": totype,
                "revs": list(map(fromenc, nodes)),
            }
        }
        ret = self._client.query(timeout, query, json.dumps(params))
        self._raise_errors(ret)
        result = {}
        for pair in ret["data"]["query"]["rev_map"]:
            result[fromdec(pair["from_rev"])] = todec(pair["to_rev"])
        return result
Example #5
    def _batchrequest(self, pointers, action):
        """Get metadata about objects pointed by pointers for given action

        Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
        See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
        """
        self.ui.log("lfs_url", lfs_url=self.baseurl)
        objects = [{"oid": p.oid(), "size": p.size()} for p in pointers]
        requestdata = pycompat.encodeutf8(
            json.dumps({
                "objects": objects,
                "operation": action
            }))
        batchreq = util.urlreq.request("%s/objects/batch" % self.baseurl,
                                       data=requestdata)
        batchreq.add_header("Accept", "application/vnd.git-lfs+json")
        batchreq.add_header("Content-Type", "application/vnd.git-lfs+json")
        self._addextraheaders(batchreq)
        try:
            res = self.urlopener.open(batchreq)
            server = res.info().get("server")
            self.ui.log("lfs_server", lfs_server=server)
            rawjson = res.read()
        except util.urlerr.httperror as ex:
            raise LfsRemoteError(
                _("LFS HTTP error: %s (action=%s)") % (ex, action))
        try:
            response = json.loads(rawjson)
        except ValueError:
            raise LfsRemoteError(
                _("LFS server returns invalid JSON: %s") % rawjson)
        return response
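For reference, the payload built above and a typical successful reply follow the git-lfs batch API linked in the docstring; the oid, size, and href values below are illustrative:

request_body = {
    "objects": [{"oid": "31fcae2f...", "size": 12}],
    "operation": "download",
}
# The server echoes each object back, adding per-object transfer actions:
response_body = {
    "objects": [
        {
            "oid": "31fcae2f...",
            "size": 12,
            "actions": {
                "download": {"href": "https://lfs.example.com/obj", "header": {}}
            },
        }
    ]
}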
Example #6
def encodebookmarks(bookmarks):
    encoded = {}
    for bookmark, node in pycompat.iteritems(bookmarks):
        encoded[bookmark] = node
    dumped = pycompat.encodeutf8(json.dumps(encoded))
    result = struct.pack(">i", len(dumped)) + dumped
    return result
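A hypothetical inverse of encodebookmarks, reading the 4-byte big-endian length prefix back and decoding the JSON payload (decodebookmarks is our name, not necessarily the codebase's):

import json
import struct

def decodebookmarks(stream):
    # Read the ">i" length prefix written above, then parse that many
    # bytes of UTF-8 encoded JSON.
    (length,) = struct.unpack(">i", stream.read(4))
    return json.loads(stream.read(length).decode("utf-8"))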
Example #7
    def _encodedmarkers(self, obsmarkers):
        # pred, succs, flags, metadata, date, parents = marker
        return [{
            "pred": nodemod.hex(m[0]),
            "succs": [nodemod.hex(s) for s in m[1]],
            "predparents": [nodemod.hex(p) for p in m[5]] if m[5] else [],
            "flags": m[2],
            "date": float(repr(m[4][0])),
            "tz": m[4][1],
            "meta": json.dumps(m[3]),
        } for m in obsmarkers]
Example #8
    def _saveworkspaces(self, data):
        filename = os.path.join(self.path, "workspacesdata")
        with open(filename, "wb") as f:
            f.write(
                pycompat.encodeutf8(
                    json.dumps({
                        "workspaces_data": {
                            "workspaces": [{
                                "name": item.name,
                                "archived": item.archived,
                                "version": item.version,
                            } for item in data]
                        }
                    })))
Example #9
    def scmquery_log(
        self,
        repo,
        scm_type,
        rev,
        file_paths=None,
        number=None,
        skip=None,
        exclude_rev_and_ancestors=None,
        before_timestamp=None,
        after_timestamp=None,
        timeout=10,
        use_mutable_history=False,
    ):
        """List commits from the repo meeting given criteria.

        Returns a list of {"hash": ...} dicts, one per matching commit.
        """
        query = """
            query ScmQueryLogV2(
                $params: SCMQueryServiceLogParams!
            ) {
                query: scmquery_service_log(params: $params) {
                    hash,
                }
            }
        """
        params = {
            "params": {
                "caller_info": "hgext.extlib.phabricator.graphql.scmquery_log",
                "repo": repo,
                "scm_type": scm_type,
                "rev": rev,
                "file_paths": file_paths,
                "number": number,
                "skip": skip,
                "exclude_rev_and_ancestors": exclude_rev_and_ancestors,
                "before_timestamp": before_timestamp,
                "after_timestamp": after_timestamp,
                "follow_mutable_file_history": use_mutable_history,
            }
        }
        ret = self._client.query(timeout, query, json.dumps(params))
        self._raise_errors(ret)
        return ret["data"]["query"]
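An illustrative call, assuming client is an instance of this class (the repo name, rev, and path are made up):

entries = client.scmquery_log(
    "fbsource", "hg", "master", file_paths=["tools/"], number=10
)
hashes = [e["hash"] for e in entries]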
Example #10
    def tojson(self):
        return json.dumps(self._todict())
Example #11
    def _save(self, data):
        filename = os.path.join(self.path, "commitcloudservicedb")
        with open(filename, "wb") as f:
            f.write(pycompat.encodeutf8(json.dumps(data)))
Example #12
def _amend(orig, ui, repo, old, extra, pats, opts):
    """Wraps amend to collect copytrace data on amend

    If a file is created in one commit, modified in a subsequent commit, and
    then renamed or copied by amending the original commit, restacking the
    commits that modify the file will fail:

    file modified here    B     B'  restack of B to B' will fail
                          |     :
    file created here     A --> A'  file renamed in amended commit
                          |    /
                          o --

    This function collects information about copies and renames from amend
    commits, and saves it for use during rebases onto the amend commit.  This
    lets rebases involving files that have been renamed or copied in an amend
    commit work without conflicts.

    This function collects the copytrace information from the working copy and
    stores it against the amended commit in a separate dbm file. Later,
    in _domergecopies, this information will be merged with the rebase
    copytrace data to incorporate renames and copies made during the amend.
    """

    # Check if amend copytracing has been disabled.
    if not ui.configbool("copytrace", "enableamendcopytrace"):
        return orig(ui, repo, old, extra, pats, opts)

    # Need to get the amend-copies before calling the command because files from
    # the working copy will be used during the amend.
    wctx = repo[None]

    # Find the amend-copies.
    matcher = scmutil.match(wctx, pats, opts)
    amend_copies = copiesmod.pathcopies(old, wctx, matcher)

    # Finally, invoke the command.
    node = orig(ui, repo, old, extra, pats, opts)
    amended_ctx = repo[node]

    # Store the amend-copies against the amended context.
    if amend_copies:
        # The second value from opendbm is the dbm backend's error class,
        # caught below when reading the database.
        db, error = opendbm(repo, "c")
        if db is None:
            # Database locked, can't record these amend-copies.
            ui.log("copytrace", "Failed to open amendcopytrace db: %s" % error)
            return node

        # Merge in any existing amend copies from any previous amends.
        try:
            orig_data = db[old.node()]
        except KeyError:
            orig_data = "{}"
        except error as e:
            ui.log(
                "copytrace",
                "Failed to read key %s from amendcopytrace db: %s" %
                (old.hex(), e),
            )
            return node

        orig_encoded = json.loads(orig_data)
        orig_amend_copies = dict(
            (
                pycompat.decodeutf8(
                    codecs.decode(pycompat.encodeutf8(k), "base64")),
                pycompat.decodeutf8(
                    codecs.decode(pycompat.encodeutf8(v), "base64")),
            ) for (k, v) in pycompat.iteritems(orig_encoded))

        # Copytrace information is not valid if it refers to a file that
        # doesn't exist in a commit.  We need to update or remove entries
        # that refer to files that might have only existed in the previous
        # amend commit.
        #
        # Find chained copies and renames (a -> b -> c) and collapse them to
        # (a -> c).  Delete the entry for b if this was a rename.
        for dst, src in pycompat.iteritems(amend_copies):
            if src in orig_amend_copies:
                amend_copies[dst] = orig_amend_copies[src]
                if src not in amended_ctx:
                    del orig_amend_copies[src]

        # Copy any left over copies from the previous context.
        for dst, src in pycompat.iteritems(orig_amend_copies):
            if dst not in amend_copies:
                amend_copies[dst] = src

        # Write out the entry for the new amend commit.
        encoded = dict(
            (
                pycompat.decodeutf8(
                    codecs.encode(pycompat.encodeutf8(k), "base64")),
                pycompat.decodeutf8(
                    codecs.encode(pycompat.encodeutf8(v), "base64")),
            ) for (k, v) in pycompat.iteritems(amend_copies))
        db[node] = json.dumps(encoded)
        try:
            db.close()
        except Exception as e:
            # Database corruption.  Not much we can do, so just log.
            ui.log("copytrace", "Failed to close amendcopytrace db: %s" % e)

    return node
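The key/value codec used for the dbm payload above, distilled into standalone helpers (the names are ours): each string round-trips through UTF-8 and base64 so arbitrary file names survive JSON and dbm storage.

import codecs

def _b64encode(s):
    # str -> UTF-8 bytes -> base64 bytes -> str, as when writing db[node].
    return codecs.encode(s.encode("utf-8"), "base64").decode("utf-8")

def _b64decode(s):
    # The inverse transform, as when reading orig_amend_copies.
    return codecs.decode(s.encode("utf-8"), "base64").decode("utf-8")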
Example #13
    def serialize(cls, input):
        return pycompat.encodeutf8(json.dumps(input))
Example #14
def progress(repo, step, **kwargs):
    with repo.sharedvfs.open(progressfilename, "wb", atomictemp=True) as f:
        data = {"step": str(step), "data": kwargs}
        f.write(pycompat.encodeutf8(json.dumps(data)))
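A hypothetical reader for the file written above, assuming the same progressfilename and sharedvfs:

def readprogress(repo):
    # Load the last step recorded by progress(); returns the step/data
    # dict, or None if no progress file exists yet.
    try:
        with repo.sharedvfs.open(progressfilename, "rb") as f:
            return json.loads(f.read().decode("utf-8"))
    except IOError:
        return None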
Example #15
    def _command(self, *args):
        with util.traced("watchman-command", args=json.dumps(args[1:])) as span:
            return self._retrycommand(span, 0, *args)
Example #16
        def log(self, event, *msg, **opts):
            """Redirect filtered log event to a sampling file
            The configuration looks like:
            [sampling]
            filepath = path/to/file
            key.eventname = value
            key.eventname2 = value2

            If an event name appears in the config, it is logged to the
            samplingfile augmented with value stored as ref.

            Example:
            [sampling]
            filepath = path/to/file
            key.perfstatus = perf_status

            Assuming that we call:
            ui.log('perfstatus', t=3)
            ui.log('perfcommit', t=3)
            ui.log('perfstatus', t=42)

            Then we will log to path/to/file two JSON strings separated by \0,
            one for each perfstatus event, like:
            {"event":"perfstatus",
             "ref":"perf_status",
             "msg":"",
             "opts":{"t":3}}\0
            {"event":"perfstatus",
             "ref":"perf_status",
             "msg":"",
             "opts":{"t":42}}\0

            We will also log any given environmental vars to the env_vars log,
            if configured::

              [sampling]
              env_vars = PATH,SHELL
            """
            if not util.safehasattr(self, "samplingfilters"):
                self.samplingfilters = logtofile.computesamplingfilters(self)
            if event not in self.samplingfilters:
                return super(logtofile, self).log(event, *msg, **opts)

            # special case: remove less interesting blocked fields starting
            # with "unknown_" or "alias_".
            if event == "measuredtimes":
                opts = {
                    k: v
                    for k, v in opts.items()
                    if (not k.startswith("alias_")
                        and not k.startswith("unknown_"))
                }

            ref = self.samplingfilters[event]
            script = _getcandidatelocation(ui)
            if script:
                debug = self.configbool("sampling", "debug")
                try:
                    opts["metrics_type"] = event
                    if msg and event != "metrics":
                        # do not keep message for "metrics", which only wants
                        # to log key/value dict.
                        if len(msg) == 1:
                            # don't try to format if there is only one item.
                            opts["msg"] = msg[0]
                        else:
                            # ui.log treats msg as a format string + format args.
                            try:
                                opts["msg"] = msg[0] % msg[1:]
                            except TypeError:
                                # formatting failed - just log each item of the
                                # message separately.
                                opts["msg"] = " ".join(msg)
                    with open(script, "a") as outfile:
                        outfile.write(
                            _toutf8lossy(
                                json.dumps({
                                    "data": opts,
                                    "category": ref
                                })))
                        outfile.write("\0")
                    if debug:
                        ui.write_err("%s\n" % _toutf8lossy(
                            json.dumps({
                                "data": opts,
                                "category": ref
                            })))
                except EnvironmentError:
                    pass
            return super(logtofile, self).log(event, *msg, **opts)
Example #17
    def serialize(cls, input):
        return json.dumps(input)