Example #1
File: revmap.py Project: simpkins/eden
 def append(self, hsh, sidebranch=False, path=None, flush=False):
     """add a binary hg hash and return the mapped linelog revision.
     if flush is True, incrementally update the file.
     """
     if hsh in self._hsh2rev:
         raise error.CorruptedFileError("%r is in revmap already" %
                                        hex(hsh))
     if len(hsh) != _hshlen:
         raise hgerror.ProgrammingError("hsh must be %d-char long" %
                                        _hshlen)
     if path is not None and not isinstance(path, str):
         raise hgerror.ProgrammingError("path must be str")
     idx = len(self._rev2hsh)
     flag = 0
     if sidebranch:
         flag |= sidebranchflag
     if path is not None and path != self._renamepaths[-1]:
         flag |= renameflag
         self._renamerevs.append(idx)
         self._renamepaths.append(path)
     self._rev2hsh.append(hsh)
     self._rev2flag.append(flag)
     self._hsh2rev[hsh] = idx
     if flush:
         self.flush()
     return idx
Example #2
File: globalrevs.py Project: simpkins/eden
        def revisionnumberfromdb(self):
            # This must be executed while the SQL lock is taken
            if not self.hassqlwritelock():
                raise error.ProgrammingError("acquiring globalrev needs SQL write lock")

            reponame = self._globalrevsreponame
            cursor = self.sqlcursor

            cursor.execute(
                "SELECT value FROM revision_references "
                + "WHERE repo = %s AND "
                + "namespace = 'counter' AND "
                + "name='commit' ",
                (reponame,),
            )

            counterresults = cursor.fetchall()
            if len(counterresults) == 1:
                return int(counterresults[0][0])
            elif len(counterresults) == 0:
                raise error.Abort(
                    CorruptionException(
                        _("no commit counters for %s in database") % reponame
                    )
                )
            else:
                raise error.Abort(
                    CorruptionException(
                        _("multiple commit counters for %s in database") % reponame
                    )
                )
Example #3
 def node(self, rev):
     # This is a hack.
     if isint(rev):
         raise error.ProgrammingError(
             "remotefilelog does not convert integer rev to node"
         )
     return rev
Example #4
File: json.py Project: jsoref/eden
def dumps(obj, paranoid=True):
    if obj is None:
        return "null"
    elif obj is False:
        return "false"
    elif obj is True:
        return "true"
    elif isinstance(obj, (int, long, float)):
        return pycompat.bytestr(obj)
    elif isinstance(obj, bytes):
        return '"%s"' % encoding.jsonescape(obj, paranoid=paranoid)
    elif isinstance(obj, str):
        # This branch is unreachable on Python 2, because bytes == str
        # and we'll return in the next-earlier block in the elif
        # ladder. On Python 3, this helps us catch bugs before they
        # hurt someone.
        raise error.ProgrammingError(
            "Mercurial only does output with bytes on Python 3: %r" % obj)
    elif util.safehasattr(obj, "keys"):
        out = [
            '"%s": %s' %
            (encoding.jsonescape(k, paranoid=paranoid), dumps(v, paranoid))
            for k, v in sorted(obj.iteritems())
        ]
        return "{" + ", ".join(out) + "}"
    elif util.safehasattr(obj, "__iter__"):
        out = [dumps(i, paranoid) for i in obj]
        return "[" + ", ".join(out) + "]"
    else:
        raise TypeError("cannot encode type %s" % obj.__class__.__name__)
Example #5
 def flags(self, node):
     if isinstance(node, int):
         raise error.ProgrammingError(
             "remotefilelog does not accept integer rev for flags")
     if node == nullid:
         return revlog.REVIDX_DEFAULT_FLAGS
     store = self.repo.fileslog.contentstore
     return store.getmeta(self.filename, node).get(constants.METAKEYFLAG, 0)
Example #6
File: shallowutil.py Project: iammxt/eden
def buildfileblobheader(size, flags, version=1):
    """return the header of a remotefilelog blob.

    see remotefilelogserver.createfileblob for the format.
    approximately the reverse of parsesizeflags.

    version can currently only be 1, which is the default
    """
    if version == 1:
        header = "v1\n%s%d\n%s%d" % (
            constants.METAKEYSIZE,
            size,
            constants.METAKEYFLAG,
            flags,
        )
    elif version == 0:
        raise error.ProgrammingError("fileblob version 0 no longer supported")
    else:
        raise error.ProgrammingError("unknown fileblob version %d" % version)
    return header
Example #7
File: context.py Project: simpkins/eden
 def getllrev(f):
     """(fctx) -> int"""
     # f should not be a linelog revision
     if isinstance(f, int):
         raise error.ProgrammingError("f should not be an int")
     # f is a fctx, allocate linelog rev on demand
     hsh = f.node()
     rev = revmap.hsh2rev(hsh)
     if rev is None:
         rev = revmap.append(hsh, sidebranch=True, path=f.path())
     return rev
Example #8
 def runbgcommand(script, env, shell=False, stdout=None, stderr=None):
     """Spawn a command without waiting for it to finish."""
     # According to the Python standard library, we can't use close_fds
     # *and* redirect std*. I'm not sure that we need to because the
     # detached process has no console connection.
     if stdout is not None or stderr is not None:
         raise error.ProgrammingError(
             "runbgcommand on Windows does not support stdout or stderr"
         )
     subprocess.Popen(
         script, shell=shell, env=env, close_fds=True, creationflags=_creationflags
     )
Example #9
File: __init__.py Project: miscreant1/eden
 def changelogadd(orig, self, *args):
     oldlen = len(self)
     node = orig(self, *args)
     newlen = len(self)
     if oldlen != newlen:
         for oldargs in pendingfilecommits:
             log, rt, tr, link, p1, p2, n, fl, c, m = oldargs
             linknode = self.node(link)
             if linknode == node:
                 log.addrawrevision(rt, tr, linknode, p1, p2, n, fl, c, m)
             else:
                 raise error.ProgrammingError(
                     "pending multiple integer revisions are not supported")
     else:
         # "link" is actually wrong here (it is set to len(changelog))
         # if changelog remains unchanged, skip writing file revisions
         # but still do a sanity check about pending multiple revisions
         if len(set(x[3] for x in pendingfilecommits)) > 1:
             raise error.ProgrammingError(
                 "pending multiple integer revisions are not supported")
     del pendingfilecommits[:]
     return node
Example #10
File: shallowutil.py Project: jsoref/eden
def buildpackmeta(metadict):
    """like _buildpackmeta, but typechecks metadict and normalize it.

    This means, METAKEYSIZE and METAKEYSIZE should have integers as values,
    and METAKEYFLAG will be dropped if its value is 0.
    """
    newmeta = {}
    for k, v in (metadict or {}).iteritems():
        expectedtype = _metaitemtypes.get(k, (bytes,))
        if not isinstance(v, expectedtype):
            raise error.ProgrammingError("packmeta: wrong type of key %s" % k)
        # normalize int to binary buffer
        if int in expectedtype:
            # optimization: remove flag if it's 0 to save space
            if k == constants.METAKEYFLAG and v == 0:
                continue
            v = int2bin(v)
        newmeta[k] = v
    return _buildpackmeta(newmeta)
Example #11
File: shallowutil.py Project: iammxt/eden
def buildpackmeta(metadict):
    # type: (Mapping[str, int]) -> bytes
    """like _buildpackmeta, but typechecks metadict and normalize it.

    This means, METAKEYSIZE and METAKEYSIZE should have integers as values,
    and METAKEYFLAG will be dropped if its value is 0.
    """
    newmeta = {}
    for k, v in pycompat.iteritems((metadict or {})):
        expectedtype = _metaitemtypes.get(k, (bytes, ))
        # pyre-fixme[6]: Expected `Union[typing.Type[typing.Any],
        #  typing.Tuple[typing.Type[typing.Any], ...]]` for 2nd param but got `Any`.
        if not isinstance(v, expectedtype):
            raise error.ProgrammingError("packmeta: wrong type of key %s" % k)
        # normalize int to binary buffer
        if int in expectedtype:
            # optimization: remove flag if it's 0 to save space
            if k == constants.METAKEYFLAG and v == 0:
                continue
            v = int2bin(v)
        newmeta[k] = v
    return _buildpackmeta(newmeta)
Example #12
File: shallowutil.py Project: jsoref/eden
def _buildpackmeta(metadict):
    """reverse of _parsepackmeta, dict -> bytes (<metadata-list>)

    The dict contains raw content - both keys and values are strings.
    Upper-level business may want to serialize some of other types (like
    integers) to strings before calling this function.

    raise ProgrammingError when metadata key is illegal, or ValueError if
    length limit is exceeded
    """
    metabuf = ""
    for k, v in sorted((metadict or {}).iteritems()):
        if len(k) != 1:
            raise error.ProgrammingError("packmeta: illegal key: %s" % k)
        if len(v) > 0xFFFE:
            raise ValueError("metadata value is too long: 0x%x > 0xfffe" % len(v))
        metabuf += k
        metabuf += struct.pack("!H", len(v))
        metabuf += v
    # len(metabuf) is guaranteed representable in 4 bytes, because there are
    # only 256 keys, and for each value, len(value) <= 0xfffe.
    return metabuf
Example #13
File: blobstore.py Project: simpkins/eden
    def _batch(self, pointers, localstore, action, objectnames=None):
        if action not in ["upload", "download"]:
            raise error.ProgrammingError("invalid Git-LFS action: %s" % action)

        response = self._batchrequest(pointers, action)
        objects = self._extractobjects(response, pointers, action)
        total = sum(x.get("size", 0) for x in objects)
        perftrace.tracebytes("Size", total)
        sizes = {}
        for obj in objects:
            sizes[obj.get("oid")] = obj.get("size", 0)
        topic = {
            "upload": _("lfs uploading"),
            "download": _("lfs downloading")
        }[action]
        if self.ui.verbose and len(objects) > 1:
            self.ui.write(
                _("lfs: need to transfer %d objects (%s)\n") %
                (len(objects), util.bytecount(total)))

        def transfer(chunk):
            for obj in chunk:
                objsize = obj.get("size", 0)
                if self.ui.verbose:
                    if action == "download":
                        msg = _("lfs: downloading %s (%s)\n")
                    elif action == "upload":
                        msg = _("lfs: uploading %s (%s)\n")
                    self.ui.write(msg %
                                  (obj.get("oid"), util.bytecount(objsize)))
                retry = self.retry
                while True:
                    try:
                        yield 0, obj.get("oid")
                        self._basictransfer(obj, action, localstore)
                        yield 1, obj.get("oid")
                        break
                    except Exception as ex:
                        if retry > 0:
                            if self.ui.verbose:
                                self.ui.write(
                                    _("lfs: failed: %r (remaining retry %d)\n")
                                    % (ex, retry))
                            retry -= 1
                            continue
                        raise

        starttimestamp = util.timer()
        if action == "download":
            oids = worker.worker(
                self.ui,
                0.1,
                transfer,
                (),
                sorted(objects, key=lambda o: o.get("oid")),
                preferthreads=True,
                callsite="blobstore",
            )
        else:
            oids = transfer(objects)

        transferred = 0
        with progress.bar(self.ui,
                          topic,
                          _("bytes"),
                          total=total,
                          formatfunc=util.bytecount) as prog:
            for count, oid in oids:
                if count != 0:
                    transferred += sizes[oid]
                    if self.ui.verbose:
                        self.ui.write(_("lfs: processed: %s\n") % oid)
                if objectnames is not None:
                    prog.value = (transferred, objectnames.get(oid, ""))
                else:
                    prog.value = transferred

        currenttimestamp = util.timer()
        self._metrics["lfs_%s_size" % action] += total
        self._metrics["lfs_%s_time" % action] += (currenttimestamp - max(
            self._timestamp["latest_%s_timestamp" % action],
            starttimestamp)) * 1000
        self._timestamp["latest_%s_timestamp" % action] = currenttimestamp
Example #14
File: repack.py Project: leszfb/eden
def _getstores(repo, category):
    if category == constants.FILEPACK_CATEGORY:
        return (repo.fileslog.contentstore, repo.fileslog.metadatastore)
    elif category == constants.TREEPACK_CATEGORY:
        return (repo.manifestlog.datastore, repo.manifestlog.historystore)
    raise error.ProgrammingError("invalid pack category")
Example #15
 def revdiff(self, node1, node2):
     if node1 != nullid and (self.flags(node1) or self.flags(node2)):
         raise error.ProgrammingError("cannot revdiff revisions with non-zero flags")
     return mdiff.textdiff(
         self.revision(node1, raw=True), self.revision(node2, raw=True)
     )
Example #16
def _undoto(ui, repo, reverseindex, keep=False, branch=None):
    # undo to specific reverseindex
    # branch is a changectx hash (potentially short form)
    # which identifies its branch via localbranch revset

    if branch and repo.ui.configbool("experimental", "narrow-heads"):
        raise error.Abort(
            _("'undo --branch' is no longer supported in the current setup"))

    if repo != repo.unfiltered():
        raise error.ProgrammingError(_("_undoto expects unfilterd repo"))
    try:
        nodedict = _readindex(repo, reverseindex)
    except IndexError:
        raise error.Abort(_("index out of bounds"))

    # bookmarks
    bookstring = _readnode(repo, "bookmarks.i", nodedict["bookmarks"])
    booklist = bookstring.split("\n")
    if branch:
        spec = revsetlang.formatspec("_localbranch(%s)", branch)
        branchcommits = tohexnode(repo, spec)
    else:
        branchcommits = False

    # copy implementation for bookmarks
    itercopy = []
    for mark in pycompat.iteritems(repo._bookmarks):
        itercopy.append(mark)
    bmremove = []
    for mark in itercopy:
        if not branchcommits or hex(mark[1]) in branchcommits:
            bmremove.append((mark[0], None))
    repo._bookmarks.applychanges(repo, repo.currenttransaction(), bmremove)
    bmchanges = []
    for mark in booklist:
        if mark:
            kv = mark.rsplit(" ", 1)
            if not branchcommits or kv[1] in branchcommits or (
                    kv[0], None) in bmremove:
                bmchanges.append((kv[0], bin(kv[1])))
    repo._bookmarks.applychanges(repo, repo.currenttransaction(), bmchanges)

    # working copy parent
    workingcopyparent = _readnode(repo, "workingparent.i",
                                  nodedict["workingparent"])
    if not keep:
        if not branchcommits or workingcopyparent in branchcommits:
            # bailifchanged is run, so this should be safe
            hg.clean(repo, workingcopyparent, show_stats=False)
    elif not branchcommits or workingcopyparent in branchcommits:
        # keeps working copy files
        prednode = bin(workingcopyparent)
        predctx = repo[prednode]

        changedfiles = []
        wctx = repo[None]
        wctxmanifest = wctx.manifest()
        predctxmanifest = predctx.manifest()
        dirstate = repo.dirstate
        diff = predctxmanifest.diff(wctxmanifest)
        changedfiles.extend(pycompat.iterkeys(diff))

        with dirstate.parentchange():
            dirstate.rebuild(prednode, predctxmanifest, changedfiles)
            # we want added and removed files to be shown
            # properly, not with ? and ! prefixes
            for filename, data in pycompat.iteritems(diff):
                if data[0][0] is None:
                    dirstate.add(filename)
                if data[1][0] is None:
                    dirstate.remove(filename)

    # visible changesets
    addedrevs = revsetlang.formatspec("olddraft(0) - olddraft(%d)",
                                      reverseindex)
    removedrevs = revsetlang.formatspec("olddraft(%d) - olddraft(0)",
                                        reverseindex)
    if not branch:
        if repo.ui.configbool("experimental", "narrow-heads"):
            # Assuming mutation and visibility are used. Restore visibility heads
            # directly.
            _restoreheads(repo, reverseindex)
        else:
            # Legacy path.
            smarthide(repo, addedrevs, removedrevs)
            revealcommits(repo, removedrevs)
    else:
        localadds = revsetlang.formatspec(
            "(olddraft(0) - olddraft(%d)) and"
            " _localbranch(%s)", reverseindex, branch)
        localremoves = revsetlang.formatspec(
            "(olddraft(%d) - olddraft(0)) and"
            " _localbranch(%s)", reverseindex, branch)
        smarthide(repo, localadds, removedrevs)
        smarthide(repo, addedrevs, localremoves, local=True)
        revealcommits(repo, localremoves)

    # informative output
    time = _readnode(repo, "date.i", nodedict["date"])
    time = util.datestr([float(x) for x in time.split(" ")])

    nodedict = _readindex(repo, reverseindex - 1)
    commandstr = _readnode(repo, "command.i", nodedict["command"])
    commandlist = commandstr.split("\0")[1:]
    commandstr = " ".join(commandlist)
    uimessage = _("undone to %s, before %s\n") % (time, commandstr)
    if reverseindex == 1 and commandlist[0] in ("commit", "amend"):
        command = commandlist[0]
        if command == "commit" and "--amend" in commandlist:
            command = "amend"
        oldcommithash = _readnode(repo, "workingparent.i",
                                  nodedict["workingparent"])
        shorthash = short(bin(oldcommithash))
        hintutil.trigger("undo-uncommit-unamend", command, shorthash)
    repo.ui.status((uimessage))