Example #1
def _writetagcache(ui, repo, valid, cachetags):
    filename = _filename(repo)
    try:
        cachefile = repo.vfs(filename, 'w', atomictemp=True)
    except (OSError, IOError):
        return

    ui.log('tagscache', 'writing .hg/%s with %d tags\n',
           filename, len(cachetags))

    if valid[2]:
        cachefile.write('%d %s %s\n' % (valid[0], hex(valid[1]), hex(valid[2])))
    else:
        cachefile.write('%d %s\n' % (valid[0], hex(valid[1])))

    # Tag names in the cache are in UTF-8 -- which is the whole reason
    # we keep them in UTF-8 throughout this module.  If we converted
    # them to local encoding on input, we would lose info writing them to
    # the cache.
    for (name, (node, hist)) in sorted(cachetags.iteritems()):
        for n in hist:
            cachefile.write("%s %s\n" % (hex(n), name))
        cachefile.write("%s %s\n" % (hex(node), name))

    try:
        cachefile.close()
    except (OSError, IOError):
        pass
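
For reference, a minimal standalone sketch of the file layout produced above, using binascii.hexlify as a stand-in for Mercurial's hex() and made-up node values (plain Python, not Mercurial code):

from binascii import hexlify

def node_hex(n):
    # stand-in for mercurial.node.hex: 20-byte binary node -> 40-char hex string
    return hexlify(n).decode('ascii')

tiprev, tipnode, tagsfnode = 2, b'\x11' * 20, b'\x22' * 20   # made-up cache validity key
cachetags = {'v1.0': (b'\x33' * 20, [b'\x44' * 20])}         # tag -> (node, history)

lines = ['%d %s %s' % (tiprev, node_hex(tipnode), node_hex(tagsfnode))]
for name, (node, hist) in sorted(cachetags.items()):
    for n in hist:
        lines.append('%s %s' % (node_hex(n), name))
    lines.append('%s %s' % (node_hex(node), name))
print('\n'.join(lines))   # one "rev tipnode [fnodeshash]" line, then "hexnode tagname" lines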
Example #2
def _writetagcache(ui, repo, valid, cachetags):
    filename = _filename(repo)
    try:
        cachefile = repo.vfs(filename, 'w', atomictemp=True)
    except (OSError, IOError):
        return

    ui.log('tagscache', 'writing .hg/%s with %d tags\n', filename,
           len(cachetags))

    if valid[2]:
        cachefile.write('%d %s %s\n' %
                        (valid[0], hex(valid[1]), hex(valid[2])))
    else:
        cachefile.write('%d %s\n' % (valid[0], hex(valid[1])))

    # Tag names in the cache are in UTF-8 -- which is the whole reason
    # we keep them in UTF-8 throughout this module.  If we converted
    # them to local encoding on input, we would lose info writing them to
    # the cache.
    for (name, (node, hist)) in sorted(cachetags.iteritems()):
        for n in hist:
            cachefile.write("%s %s\n" % (hex(n), name))
        cachefile.write("%s %s\n" % (hex(node), name))

    try:
        cachefile.close()
    except (OSError, IOError):
        pass
Example #3
 def write(self, repo):
     try:
         f = repo.vfs(_filename(repo), "w", atomictemp=True)
         cachekey = [hex(self.tipnode), str(self.tiprev)]
         if self.filteredhash is not None:
             cachekey.append(hex(self.filteredhash))
         f.write(" ".join(cachekey) + '\n')
         nodecount = 0
         for label, nodes in sorted(self.iteritems()):
             for node in nodes:
                 nodecount += 1
                 if node in self._closednodes:
                     state = 'c'
                 else:
                     state = 'o'
                 f.write("%s %s %s\n" % (hex(node), state,
                                         encoding.fromlocal(label)))
         f.close()
         repo.ui.log('branchcache',
                     'wrote %s branch cache with %d labels and %d nodes\n',
                     repo.filtername, len(self), nodecount)
     except (IOError, OSError, util.Abort), inst:
         repo.ui.debug("couldn't write branch cache: %s\n" % inst)
         # Abort may be raised by a read-only opener
         pass
Example #4
 def write(self, repo):
     try:
         f = repo.vfs(_filename(repo), "w", atomictemp=True)
         cachekey = [hex(self.tipnode), str(self.tiprev)]
         if self.filteredhash is not None:
             cachekey.append(hex(self.filteredhash))
         f.write(" ".join(cachekey) + '\n')
         nodecount = 0
         for label, nodes in sorted(self.iteritems()):
             for node in nodes:
                 nodecount += 1
                 if node in self._closednodes:
                     state = 'c'
                 else:
                     state = 'o'
                 f.write("%s %s %s\n" % (hex(node), state,
                                         encoding.fromlocal(label)))
         f.close()
         repo.ui.log('branchcache',
                     'wrote %s branch cache with %d labels and %d nodes\n',
                     repo.filtername, len(self), nodecount)
     except (IOError, OSError, util.Abort), inst:
         repo.ui.debug("couldn't write branch cache: %s\n" % inst)
         # Abort may be raised by a read-only opener
         pass
Example #5
    def single(rev, seqno, fp):
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        if not fp:
            fp = cmdutil.make_file(repo, template, node, total=total,
                                   seqno=seqno, revwidth=revwidth,
                                   mode='ab')
        if fp != sys.stdout and hasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and (branch != 'default'):
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent  %s\n" % hex(prev))
        if len(parents) > 1:
            fp.write("# Parent  %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        for chunk in diff(repo, prev, node, opts=opts):
            fp.write(chunk)
Example #6
 def commit(self):
     """Write current state on disk (if necessary)"""
     if self._dirty:
         records = []
         records.append(('L', hex(self._local)))
         records.append(('O', hex(self._other)))
         for d, v in self._state.iteritems():
             records.append(('F', '\0'.join([d] + v)))
         self._writerecords(records)
         self._dirty = False
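
A small standalone sketch of the record list built above, with made-up 20-byte nodes and binascii.hexlify standing in for hex():

from binascii import hexlify

local, other = b'\xaa' * 20, b'\xbb' * 20        # made-up parent nodes of the merge
state = {'a.txt': ['u', '0' * 40, 'a.txt', 'a.txt', '1' * 40, 'a.txt', '2' * 40, '']}

records = [('L', hexlify(local).decode('ascii')),
           ('O', hexlify(other).decode('ascii'))]
for d, v in state.items():
    records.append(('F', '\0'.join([d] + v)))    # file record: NUL-separated fields
# records: one 'L' (local) and one 'O' (other) entry, plus one 'F' entry per unresolved file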
Example #7
 def commit(self):
     """Write current state on disk (if necessary)"""
     if self._dirty:
         records = []
         records.append(("L", hex(self._local)))
         records.append(("O", hex(self._other)))
         for d, v in self._state.iteritems():
             records.append(("F", "\0".join([d] + v)))
         self._writerecords(records)
         self._dirty = False
Example #8
 def write(self, repo):
     try:
         f = repo.opener(_filename(repo), "w", atomictemp=True)
         cachekey = [hex(self.tipnode), str(self.tiprev)]
         if self.filteredhash is not None:
             cachekey.append(hex(self.filteredhash))
         f.write(" ".join(cachekey) + '\n')
         for label, nodes in sorted(self.iteritems()):
             for node in nodes:
                 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
         f.close()
     except (IOError, OSError, util.Abort):
         # Abort may be raised by a read-only opener
         pass
Example #9
 def write(self, repo):
     try:
         f = repo.opener(_filename(repo), "w", atomictemp=True)
         cachekey = [hex(self.tipnode), str(self.tiprev)]
         if self.filteredhash is not None:
             cachekey.append(hex(self.filteredhash))
         f.write(" ".join(cachekey) + '\n')
         for label, nodes in sorted(self.iteritems()):
             for node in nodes:
                 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
         f.close()
     except (IOError, OSError, util.Abort):
         # Abort may be raised by a read-only opener
         pass
Example #10
    def create(self, transaction, prec, succs=(), flag=0, metadata=None):
        """obsolete: add a new obsolete marker

        * ensure it is hashable
        * check mandatory metadata
        * encode metadata

        If you are a human writing code that creates markers, you want to use
        the `createmarkers` function in this module instead.

        Return True if a new marker has been added, False if the marker
        already existed (no-op).
        """
        if metadata is None:
            metadata = {}
        if 'date' not in metadata:
            metadata['date'] = "%d %d" % util.makedate()
        if len(prec) != 20:
            raise ValueError(prec)
        for succ in succs:
            if len(succ) != 20:
                raise ValueError(succ)
        if prec in succs:
            raise ValueError(_('in-marker cycle with %s') % node.hex(prec))
        marker = (str(prec), tuple(succs), int(flag), encodemeta(metadata))
        return bool(self.add(transaction, [marker]))
Example #11
def listphases(repo):
    """List phases root for serialization over pushkey"""
    keys = {}
    value = "%i" % draft
    for root in repo._phasecache.phaseroots[draft]:
        keys[hex(root)] = value

    if repo.ui.configbool("phases", "publish", True):
        # Add an extra entry to let the remote know we are a publishing
        # repo. A publishing repo can't just pretend to be an old repo:
        # when pushing to a publishing repo, the client still needs to
        # push the phase boundary.
        #
        # A push does not only push changesets; it also pushes phase
        # data. New phase data may apply to common changesets that won't
        # be pushed (as they are common). Here is a very simple example:
        #
        # 1) repo A pushes changeset X as draft to repo B
        # 2) repo B makes changeset X public
        # 3) repo B pushes to repo A. X is not pushed, but the fact that
        #    X is now public should be
        #
        # The server can't handle this on its own, as it has no idea of
        # the client's phase data.
        keys["publishing"] = "True"
    return keys
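
A minimal sketch of the pushkey dictionary this returns, using binascii.hexlify in place of hex(); the phase number for draft and the root nodes below are assumptions for illustration:

from binascii import hexlify

draft = 1                                   # Mercurial phase numbers: public=0, draft=1, secret=2
phaseroots = [b'\xcc' * 20, b'\xdd' * 20]   # made-up binary root nodes

keys = {hexlify(r).decode('ascii'): '%i' % draft for r in phaseroots}
keys['publishing'] = 'True'                 # extra entry advertised by publishing repos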
Example #12
 def checkhash(self, text, p1, p2, node, rev=None):
     if node != self.hash(text, p1, p2):
         revornode = rev
         if revornode is None:
             revornode = templatefilters.short(hex(node))
         raise RevlogError(_("integrity check failed on %s:%s")
             % (self.indexfile, revornode))
Example #13
        def metadata():
            base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
                hex(repo.changelog.node(0)), hex(node), ctx.branch())

            tags = ''.join('tag: %s\n' % t for t in ctx.tags()
                           if repo.tagtype(t) == 'global')
            if not tags:
                repo.ui.pushbuffer()
                opts = {'template': '{latesttag}\n{latesttagdistance}',
                        'style': '', 'patch': None, 'git': None}
                cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
                ltags, dist = repo.ui.popbuffer().split('\n')
                tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
                tags += 'latesttagdistance: %s\n' % dist

            return base + tags
Example #14
def showmanifest(**args):
    repo, ctx, templ = args['repo'], args['ctx'], args['templ']
    args = args.copy()
    args.update(
        dict(rev=repo.manifest.rev(ctx.changeset()[0]),
             node=hex(ctx.changeset()[0])))
    return templ('manifest', **args)
Example #15
def listphases(repo):
    """List phases root for serialization over pushkey"""
    keys = {}
    value = '%i' % draft
    for root in repo._phasecache.phaseroots[draft]:
        keys[hex(root)] = value

    if repo.ui.configbool('phases', 'publish', True):
        # Add an extra entry to let the remote know we are a publishing
        # repo. A publishing repo can't just pretend to be an old repo:
        # when pushing to a publishing repo, the client still needs to
        # push the phase boundary.
        #
        # A push does not only push changesets; it also pushes phase
        # data. New phase data may apply to common changesets that won't
        # be pushed (as they are common). Here is a very simple example:
        #
        # 1) repo A pushes changeset X as draft to repo B
        # 2) repo B makes changeset X public
        # 3) repo B pushes to repo A. X is not pushed, but the fact that
        #    X is now public should be
        #
        # The server can't handle this on its own, as it has no idea of
        # the client's phase data.
        keys['publishing'] = 'True'
    return keys
Example #16
    def create(self, transaction, prec, succs=(), flag=0, parents=None,
               date=None, metadata=None):
        """obsolete: add a new obsolete marker

        * ensure it is hashable
        * check mandatory metadata
        * encode metadata

        If you are a human writing code that creates markers, you want to use
        the `createmarkers` function in this module instead.

        Return True if a new marker has been added, False if the marker
        already existed (no-op).
        """
        if metadata is None:
            metadata = {}
        if date is None:
            if 'date' in metadata:
                # as a courtesy for out-of-tree extensions
                date = util.parsedate(metadata.pop('date'))
            else:
                date = util.makedate()
        if len(prec) != 20:
            raise ValueError(prec)
        for succ in succs:
            if len(succ) != 20:
                raise ValueError(succ)
        if prec in succs:
            raise ValueError(_('in-marker cycle with %s') % node.hex(prec))

        metadata = tuple(sorted(metadata.iteritems()))

        marker = (str(prec), tuple(succs), int(flag), metadata, date, parents)
        return bool(self.add(transaction, [marker]))
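
The validation logic above can be illustrated with a small standalone helper (checkmarker is hypothetical, not a Mercurial API):

def checkmarker(prec, succs):
    # same sanity checks as create() above: nodes must be 20-byte binary ids,
    # and a precursor may not appear among its own successors
    if len(prec) != 20:
        raise ValueError(prec)
    for succ in succs:
        if len(succ) != 20:
            raise ValueError(succ)
    if prec in succs:
        raise ValueError('in-marker cycle')

checkmarker(b'\x01' * 20, (b'\x02' * 20,))      # passes
# checkmarker(b'\x01' * 20, (b'\x01' * 20,))    # would raise: in-marker cycle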
Example #17
    def add(self, manifest, files, desc, transaction, p1, p2,
                  user, date=None, extra={}):
        user = user.strip()
        # An empty username or a username with a "\n" will make the
        # revision text contain two "\n\n" sequences -> corrupt
        # repository since read cannot unpack the revision.
        if not user:
            raise error.RevlogError(_("empty username"))
        if "\n" in user:
            raise error.RevlogError(_("username %s contains a newline")
                                    % repr(user))

        # strip trailing whitespace and leading and trailing empty lines
        desc = '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')

        user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

        if date:
            parseddate = "%d %d" % util.parsedate(date)
        else:
            parseddate = "%d %d" % util.makedate()
        if extra and extra.get("branch") in ("default", ""):
            del extra["branch"]
        if extra:
            extra = encodeextra(extra)
            parseddate = "%s %s" % (parseddate, extra)
        l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, len(self), p1, p2)
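
A standalone sketch of the changeset text assembled above, with made-up values and binascii.hexlify in place of hex():

from binascii import hexlify

manifest = b'\xee' * 20                          # made-up manifest node
user, parseddate = 'Alice <alice@example.com>', '0 0'
files, desc = ['a.txt', 'b.txt'], 'add feature'

l = [hexlify(manifest).decode('ascii'), user, parseddate] + sorted(files) + ['', desc]
text = '\n'.join(l)
# layout: hex manifest node, user, "timestamp tz [extra]", one file per line,
# an empty line, then the description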
Example #18
    def add(self, fcl, fco, fca, fd):
        """add a new (potentially?) conflicting file the merge state
        fcl: file context for local,
        fco: file context for remote,
        fca: file context for ancestors,
        fd:  file path of the resulting merge.

        note: also write the local version to the `.hg/merge` directory.
        """
        hash = util.sha1(fcl.path()).hexdigest()
        self._repo.vfs.write('merge/' + hash, fcl.data())
        self._state[fd] = ['u', hash, fcl.path(),
                           fca.path(), hex(fca.filenode()),
                           fco.path(), hex(fco.filenode()),
                           fcl.flags()]
        self._dirty = True
Example #19
def lookup(repo, proto, key):
    try:
        r = hex(repo.lookup(key))
        success = 1
    except Exception, inst:
        r = str(inst)
        success = 0
Example #20
def _changegroupinfo(repo, nodes, source):
    if repo.ui.verbose or source == 'bundle':
        repo.ui.status(_("%d changesets found\n") % len(nodes))
    if repo.ui.debugflag:
        repo.ui.debug("list of changesets:\n")
        for node in nodes:
            repo.ui.debug("%s\n" % hex(node))
Example #21
    def add(self, fcl, fco, fca, fd):
        """add a new (potentially?) conflicting file the merge state
        fcl: file context for local,
        fco: file context for remote,
        fca: file context for ancestors,
        fd:  file path of the resulting merge.

        note: also write the local version to the `.hg/merge` directory.
        """
        hash = util.sha1(fcl.path()).hexdigest()
        self._repo.opener.write("merge/" + hash, fcl.data())
        self._state[fd] = ['u', hash, fcl.path(),
                           fca.path(), hex(fca.filenode()),
                           fco.path(), hex(fco.filenode()),
                           fcl.flags()]
        self._dirty = True
Example #22
    def edit(self, text, user, extra={}, editform=None):
        (fd, name) = tempfile.mkstemp(prefix="hg-editor-",
                                      suffix=".txt",
                                      text=True)
        try:
            f = os.fdopen(fd, "w")
            f.write(text)
            f.close()

            environ = {'HGUSER': user}
            if 'transplant_source' in extra:
                environ.update({'HGREVISION': hex(extra['transplant_source'])})
            for label in ('source', 'rebase_source'):
                if label in extra:
                    environ.update({'HGREVISION': extra[label]})
                    break
            if editform:
                environ.update({'HGEDITFORM': editform})

            editor = self.geteditor()

            util.system("%s \"%s\"" % (editor, name),
                        environ=environ,
                        onerr=util.Abort,
                        errprefix=_("edit failed"),
                        out=self.fout)

            f = open(name)
            t = f.read()
            f.close()
        finally:
            os.unlink(name)

        return t
Example #23
def lookup(repo, proto, key):
    try:
        r = hex(repo.lookup(encoding.tolocal(key)))
        success = 1
    except Exception, inst:
        r = str(inst)
        success = 0
Example #24
def lookup(repo, proto, key):
    try:
        r = hex(repo.lookup(key))
        success = 1
    except Exception, inst:
        r = str(inst)
        success = 0
Example #25
 def commit(self):
     if self._dirty:
         f = self._repo.opener("merge/state", "w")
         f.write(hex(self._local) + "\n")
         for d, v in self._state.iteritems():
             f.write("\0".join([d] + v) + "\n")
         self._dirty = False
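
A minimal sketch of the merge/state file content written above, with made-up values (plain Python, not Mercurial code):

from binascii import hexlify

local = b'\xab' * 20                               # made-up first parent of the merge
state = {'file.c': ['u', '0' * 40, 'file.c', 'anc.c', '1' * 40, 'oth.c', '2' * 40, '']}

out = [hexlify(local).decode('ascii')]
out.extend('\0'.join([d] + v) for d, v in state.items())
data = '\n'.join(out) + '\n'
# first line: hex of the local node; then one NUL-joined record per unresolved file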
Example #26
    def create(self, transaction, prec, succs=(), flag=0, parents=None,
               date=None, metadata=None):
        """obsolete: add a new obsolete marker

        * ensure it is hashable
        * check mandatory metadata
        * encode metadata

        If you are a human writing code that creates markers, you want to use
        the `createmarkers` function in this module instead.

        Return True if a new marker has been added, False if the marker
        already existed (no-op).
        """
        if metadata is None:
            metadata = {}
        if date is None:
            if 'date' in metadata:
                # as a courtesy for out-of-tree extensions
                date = util.parsedate(metadata.pop('date'))
            else:
                date = util.makedate()
        if len(prec) != 20:
            raise ValueError(prec)
        for succ in succs:
            if len(succ) != 20:
                raise ValueError(succ)
        if prec in succs:
            raise ValueError(_('in-marker cycle with %s') % node.hex(prec))

        metadata = tuple(sorted(metadata.iteritems()))

        marker = (str(prec), tuple(succs), int(flag), metadata, date, parents)
        return bool(self.add(transaction, [marker]))
Example #27
 def commit(self):
     if self._dirty:
         f = self._repo.opener("merge/state", "w")
         f.write(hex(self._local) + "\n")
         for d, v in self._state.iteritems():
             f.write("\0".join([d] + v) + "\n")
         self._dirty = False
Example #28
 def rev(self, node):
     """filtered version of revlog.rev"""
     r = super(changelog, self).rev(node)
     if r in self.filteredrevs:
         raise error.FilteredLookupError(hex(node), self.indexfile,
                                         _('filtered node'))
     return r
Example #29
 def rev(self, node):
     """filtered version of revlog.rev"""
     r = super(changelog, self).rev(node)
     if r in self.filteredrevs:
         raise error.FilteredLookupError(hex(node), self.indexfile,
                                         _('filtered node'))
     return r
Example #30
    def create(self, transaction, prec, succs=(), flag=0, metadata=None):
        """obsolete: add a new obsolete marker

        * ensure it is hashable
        * check mandatory metadata
        * encode metadata

        If you are a human writing code that creates markers, you want to use
        the `createmarkers` function in this module instead.

        Return True if a new marker has been added, False if the marker
        already existed (no-op).
        """
        if metadata is None:
            metadata = {}
        if 'date' not in metadata:
            metadata['date'] = "%d %d" % util.makedate()
        if len(prec) != 20:
            raise ValueError(prec)
        for succ in succs:
            if len(succ) != 20:
                raise ValueError(succ)
        if prec in succs:
            raise ValueError(_('in-marker cycle with %s') % node.hex(prec))
        marker = (str(prec), tuple(succs), int(flag), encodemeta(metadata))
        return bool(self.add(transaction, [marker]))
Example #31
def listphases(repo):
    """List phases root for serialisation over pushkey"""
    keys = {}
    value = '%i' % draft
    for root in repo._phaseroots[draft]:
        keys[hex(root)] = value

    if repo.ui.configbool('phases', 'publish', True):
        # Add an extra entry to let the remote know we are a publishing repo.
        # A publishing repo can't just pretend to be an old repo. When pushing
        # to a publishing repo, the client still needs to push the phase
        # boundary.
        #
        # A push does not only push changesets. It also pushes phase data. New
        # phase data may apply to common changesets which won't be pushed (as
        # they are common).  Here is a very simple example:
        #
        # 1) repo A pushes changeset X as draft to repo B
        # 2) repo B makes changeset X public
        # 3) repo B pushes to repo A. X is not pushed, but the fact that X is
        #    now public should be
        #
        # The server can't handle this on its own, as it has no idea of the
        # client's phase data.
        keys['publishing'] = 'True'
    return keys
Example #32
    def _partialmatch(self, id):
        try:
            n = self.index.partialmatch(id)
            if n and self.hasnode(n):
                return n
            return None
        except RevlogError:
            # parsers.c radix tree lookup gave multiple matches
            # fall through to slow path that filters hidden revisions
            pass
        except (AttributeError, ValueError):
            # we are pure python, or key was too short to search radix tree
            pass

        if id in self._pcache:
            return self._pcache[id]

        if len(id) < 40:
            try:
                # hex(node)[:...]
                l = len(id) // 2  # grab an even number of digits
                prefix = bin(id[:l * 2])
                nl = [e[7] for e in self.index if e[7].startswith(prefix)]
                nl = [
                    n for n in nl if hex(n).startswith(id) and self.hasnode(n)
                ]
                if len(nl) > 0:
                    if len(nl) == 1:
                        self._pcache[id] = nl[0]
                        return nl[0]
                    raise LookupError(id, self.indexfile,
                                      _('ambiguous identifier'))
                return None
            except TypeError:
                pass
Example #33
    def _partialmatch(self, id):
        try:
            return self.index.partialmatch(id)
        except RevlogError:
            # parsers.c radix tree lookup gave multiple matches
            raise LookupError(id, self.indexfile, _("ambiguous identifier"))
        except (AttributeError, ValueError):
            # we are pure python, or key was too short to search radix tree
            pass

        if id in self._pcache:
            return self._pcache[id]

        if len(id) < 40:
            try:
                # hex(node)[:...]
                l = len(id) // 2  # grab an even number of digits
                prefix = bin(id[:l * 2])
                nl = [e[7] for e in self.index if e[7].startswith(prefix)]
                nl = [n for n in nl if hex(n).startswith(id)]
                if len(nl) > 0:
                    if len(nl) == 1:
                        self._pcache[id] = nl[0]
                        return nl[0]
                    raise LookupError(id, self.indexfile,
                                      _('ambiguous identifier'))
                return None
            except TypeError:
                pass
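
The hex-prefix matching in the slow path can be sketched standalone like this, with made-up nodes and binascii in place of Mercurial's hex()/bin():

from binascii import hexlify, unhexlify

nodes = [b'\x1f' + b'\x00' * 19, b'\x1e' + b'\x11' * 19]   # made-up 20-byte nodes

def partialmatch(id, nodes):
    # mirror the slow path above: match a (possibly odd-length) hex prefix
    l = len(id) // 2                        # grab an even number of digits
    prefix = unhexlify(id[:l * 2])          # binary form of the even part
    nl = [n for n in nodes if n.startswith(prefix)]
    nl = [n for n in nl if hexlify(n).decode('ascii').startswith(id)]
    if len(nl) > 1:
        raise LookupError('ambiguous identifier')
    return nl[0] if nl else None

print(partialmatch('1f0', nodes))           # matches only the first node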
Example #34
    def edit(self, text, user, extra={}):
        (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
                                      text=True)
        try:
            f = os.fdopen(fd, "w")
            f.write(text)
            f.close()

            environ = {'HGUSER': user}
            if 'transplant_source' in extra:
                environ.update({'HGREVISION': hex(extra['transplant_source'])})
            for label in ('source', 'rebase_source'):
                if label in extra:
                    environ.update({'HGREVISION': extra[label]})
                    break

            editor = self.geteditor()

            util.system("%s \"%s\"" % (editor, name),
                        environ=environ,
                        onerr=util.Abort, errprefix=_("edit failed"),
                        out=self.fout)

            f = open(name)
            t = f.read()
            f.close()
        finally:
            os.unlink(name)

        return t
Example #35
def _changegroupinfo(repo, nodes, source):
    if repo.ui.verbose or source == 'bundle':
        repo.ui.status(_("%d changesets found\n") % len(nodes))
    if repo.ui.debugflag:
        repo.ui.debug("list of changesets:\n")
        for node in nodes:
            repo.ui.debug("%s\n" % hex(node))
Example #36
 def do_branchmap(self):
     branchmap = self.repo.branchmap()
     heads = []
     for branch, nodes in branchmap.iteritems():
         branchname = urllib.quote(branch)
         branchnodes = [hex(node) for node in nodes]
         heads.append('%s %s' % (branchname, ' '.join(branchnodes)))
     self.respond('\n'.join(heads))
Example #37
 def do_branchmap(self):
     branchmap = self.repo.branchmap()
     heads = []
     for branch, nodes in branchmap.iteritems():
         branchname = urllib.quote(branch)
         branchnodes = [hex(node) for node in nodes]
         heads.append('%s %s' % (branchname, ' '.join(branchnodes)))
     self.respond('\n'.join(heads))
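
A standalone sketch of the wire payload built above, using urllib.parse.quote and binascii.hexlify as stand-ins for the Python 2 urllib.quote and hex() calls; the branch heads are made up:

from binascii import hexlify
from urllib.parse import quote              # Python 3 counterpart of urllib.quote used above

branchmap = {'default': [b'\x10' * 20],     # made-up branch heads
             'stable': [b'\x20' * 20, b'\x21' * 20]}

heads = []
for branch, nodes in sorted(branchmap.items()):
    branchnodes = [hexlify(n).decode('ascii') for n in nodes]
    heads.append('%s %s' % (quote(branch), ' '.join(branchnodes)))
payload = '\n'.join(heads)                  # one line per branch: quoted name, then hex heads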
Example #38
def _writetagcache(ui, repo, heads, tagfnode, cachetags):

    try:
        cachefile = repo.opener('cache/tags', 'w', atomictemp=True)
    except (OSError, IOError):
        return

    ui.log('tagscache', 'writing tags cache file with %d heads and %d tags\n',
            len(heads), len(cachetags))

    realheads = repo.heads()            # for sanity checks below
    for head in heads:
        # temporary sanity checks; these can probably be removed
        # once this code has been in crew for a few weeks
        assert head in repo.changelog.nodemap, \
               'trying to write non-existent node %s to tag cache' % short(head)
        assert head in realheads, \
               'trying to write non-head %s to tag cache' % short(head)
        assert head != nullid, \
               'trying to write nullid to tag cache'

        # This can't fail because of the first assert above.  When/if we
        # remove that assert, we might want to catch LookupError here
        # and downgrade it to a warning.
        rev = repo.changelog.rev(head)

        fnode = tagfnode.get(head)
        if fnode:
            cachefile.write('%d %s %s\n' % (rev, hex(head), hex(fnode)))
        else:
            cachefile.write('%d %s\n' % (rev, hex(head)))

    # Tag names in the cache are in UTF-8 -- which is the whole reason
    # we keep them in UTF-8 throughout this module.  If we converted
    # them to local encoding on input, we would lose info writing them to
    # the cache.
    cachefile.write('\n')
    for (name, (node, hist)) in cachetags.iteritems():
        for n in hist:
            cachefile.write("%s %s\n" % (hex(n), name))
        cachefile.write("%s %s\n" % (hex(node), name))

    try:
        cachefile.close()
    except (OSError, IOError):
        pass
Example #39
def showmanifest(**args):
    repo, ctx, templ = args['repo'], args['ctx'], args['templ']
    args = args.copy()
    args.update({
        'rev': repo.manifest.rev(ctx.changeset()[0]),
        'node': hex(ctx.changeset()[0])
    })
    return templ('manifest', **args)
Example #40
def _writetagcache(ui, repo, heads, tagfnode, cachetags):

    try:
        cachefile = repo.vfs('cache/tags', 'w', atomictemp=True)
    except (OSError, IOError):
        return

    ui.log('tagscache', 'writing tags cache file with %d heads and %d tags\n',
           len(heads), len(cachetags))

    realheads = repo.heads()  # for sanity checks below
    for head in heads:
        # temporary sanity checks; these can probably be removed
        # once this code has been in crew for a few weeks
        assert head in repo.changelog.nodemap, \
               'trying to write non-existent node %s to tag cache' % short(head)
        assert head in realheads, \
               'trying to write non-head %s to tag cache' % short(head)
        assert head != nullid, \
               'trying to write nullid to tag cache'

        # This can't fail because of the first assert above.  When/if we
        # remove that assert, we might want to catch LookupError here
        # and downgrade it to a warning.
        rev = repo.changelog.rev(head)

        fnode = tagfnode.get(head)
        if fnode:
            cachefile.write('%d %s %s\n' % (rev, hex(head), hex(fnode)))
        else:
            cachefile.write('%d %s\n' % (rev, hex(head)))

    # Tag names in the cache are in UTF-8 -- which is the whole reason
    # we keep them in UTF-8 throughout this module.  If we converted
    # them to local encoding on input, we would lose info writing them to
    # the cache.
    cachefile.write('\n')
    for (name, (node, hist)) in cachetags.iteritems():
        for n in hist:
            cachefile.write("%s %s\n" % (hex(n), name))
        cachefile.write("%s %s\n" % (hex(node), name))

    try:
        cachefile.close()
    except (OSError, IOError):
        pass
Example #41
 def checkhash(self, text, p1, p2, node, rev=None):
     if node != hash(text, p1, p2):
         revornode = rev
         if revornode is None:
             revornode = templatefilters.short(hex(node))
         raise RevlogError(
             _("integrity check failed on %s:%s") %
             (self.indexfile, revornode))
Example #42
 def __getitem__(self, key):
     try:
         return self.p.map[key]
     except KeyError:
         try:
             self.load(key)
             return self.p.map[key]
         except KeyError:
             raise KeyError("node " + hex(key))
Example #43
 def do_lookup(self):
     arg, key = self.getarg()
     assert arg == 'key'
     try:
         r = hex(self.repo.lookup(key))
         success = 1
     except Exception,inst:
         r = str(inst)
         success = 0
Example #44
 def __getitem__(self, key):
     try:
         return self.p.map[key]
     except KeyError:
         try:
             self.load(key)
             return self.p.map[key]
         except KeyError:
             raise KeyError("node " + hex(key))
Example #45
 def do_lookup(self):
     arg, key = self.getarg()
     assert arg == 'key'
     try:
         r = hex(self.repo.lookup(key))
         success = 1
     except Exception, inst:
         r = str(inst)
         success = 0
Example #46
def archive(repo,
            dest,
            node,
            kind,
            decode=True,
            matchfn=None,
            prefix=None,
            mtime=None):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    matchfn is function to filter names of files to write to archive.

    prefix is name of path to put before every archive member.'''
    def write(name, mode, islink, getdata):
        if matchfn and not matchfn(name): return
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(name, mode, islink, data)

    ctx = repo.changectx(node)
    if kind not in archivers:
        raise util.Abort(_("unknown archive type '%s'" % kind))
    archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
    m = ctx.manifest()
    items = m.items()
    items.sort()
    if repo.ui.configbool("ui", "archivemeta", True):
        write(
            '.hg_archival.txt', 0644, False, lambda: 'repo: %s\nnode: %s\n' %
            (hex(repo.changelog.node(0)), hex(node)))
    for filename, filenode in items:
        write(filename,
              m.execf(filename) and 0755 or 0644, m.linkf(filename),
              lambda: repo.file(filename).read(filenode))
    archiver.done()
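
A minimal sketch of the metadata string written to .hg_archival.txt above, with made-up nodes and binascii.hexlify in place of hex():

from binascii import hexlify

root = b'\x01' * 20                         # made-up changelog node(0), i.e. the repo id
node = b'\x0a' * 20                         # made-up node being archived
metadata = 'repo: %s\nnode: %s\n' % (hexlify(root).decode('ascii'),
                                     hexlify(node).decode('ascii'))
# this is the two-line body written when the ui.archivemeta option is enabled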
Example #47
 def commit(self, text, user, date):
     # don't bother committing in the subrepo if it's only been
     # updated
     if not self.dirty(True):
         return self._repo['.'].hex()
     self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
     n = self._repo.commit(text, user, date)
     if not n:
         return self._repo['.'].hex()  # different version checked out
     return node.hex(n)
Example #48
 def primary(branch):
     if branch == '.':
         if not lrepo or not lrepo.local():
             raise util.Abort(_("dirstate branch not accessible"))
         branch = lrepo.dirstate.branch()
     if branch in branchmap:
         revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
         return True
     else:
         return False
Example #49
def save_state(repo, state):
    f = repo.vfs("bisect.state", "w", atomictemp=True)
    wlock = repo.wlock()
    try:
        for kind in sorted(state):
            for node in state[kind]:
                f.write("%s %s\n" % (kind, hex(node)))
        f.close()
    finally:
        wlock.release()
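
A standalone sketch of the bisect.state lines written above, with a made-up state dictionary and binascii.hexlify in place of hex():

from binascii import hexlify

state = {'good': [b'\x01' * 20], 'bad': [b'\x02' * 20], 'skip': []}   # made-up bisect state

lines = ['%s %s' % (kind, hexlify(node).decode('ascii'))
         for kind in sorted(state)
         for node in state[kind]]
# each line: the mark ("bad", "good", "skip") followed by the 40-character hex node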
Example #50
def save_state(repo, state):
    f = repo.opener("bisect.state", "w", atomictemp=True)
    wlock = repo.wlock()
    try:
        for kind in state:
            for node in state[kind]:
                f.write("%s %s\n" % (kind, hex(node)))
        f.close()
    finally:
        wlock.release()
Example #51
 def primary(butf8):
     if butf8 == '.':
         if not lrepo or not lrepo.local():
             raise util.Abort(_("dirstate branch not accessible"))
         butf8 = lrepo.dirstate.branch()
     if butf8 in branchmap:
         revs.extend(node.hex(r) for r in reversed(branchmap[butf8]))
         return True
     else:
         return False
Example #52
 def primary(branch):
     if branch == '.':
         if not lrepo:
             raise util.Abort(_("dirstate branch not accessible"))
         branch = lrepo.dirstate.branch()
     if branch in branchmap:
         revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
         return True
     else:
         return False
Example #53
def writeroots(repo):
    """Write phase roots from disk"""
    f = repo.sopener('phaseroots', 'w', atomictemp=True)
    try:
        for phase, roots in enumerate(repo._phaseroots):
            for h in roots:
                f.write('%i %s\n' % (phase, hex(h)))
        repo._dirtyphases = False
    finally:
        f.close()
Example #54
    def single(rev, seqno, fp):
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        shouldclose = False
        if not fp:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0]  #Commit always has a first line.
            fp = makefileobj(repo,
                             template,
                             node,
                             desc=desc,
                             total=total,
                             seqno=seqno,
                             revwidth=revwidth,
                             mode='ab')
            if fp != template:
                shouldclose = True
        if fp != sys.stdout and util.safehasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and branch != 'default':
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent  %s\n" % hex(prev))
        if len(parents) > 1:
            fp.write("# Parent  %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        for chunk in patch.diff(repo, prev, node, opts=opts):
            fp.write(chunk)

        if shouldclose:
            fp.close()
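
A standalone sketch of the patch header block written above, with made-up nodes and binascii.hexlify in place of hex(); the user and date values are placeholders:

from binascii import hexlify

node, prev = b'\x0f' * 20, b'\x0e' * 20     # made-up changeset node and its first parent
header = '\n'.join([
    '# HG changeset patch',
    '# User Alice <alice@example.com>',
    '# Date 0 0',
    '# Node ID %s' % hexlify(node).decode('ascii'),
    '# Parent  %s' % hexlify(prev).decode('ascii'),
])
# a "# Branch <name>" line precedes the node id for non-default branches, and a second
# "# Parent" line follows for merges; the description and diff chunks come after this block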
Example #55
def writeroots(repo):
    """Write phase roots from disk"""
    f = repo.sopener('phaseroots', 'w', atomictemp=True)
    try:
        for phase, roots in enumerate(repo._phaseroots):
            for h in roots:
                f.write('%i %s\n' % (phase, hex(h)))
        repo._dirtyphases = False
    finally:
        f.close()
Example #56
 def commit(self, text, user, date):
     # don't bother committing in the subrepo if it's only been
     # updated
     if not self.dirty(True):
         return self._repo['.'].hex()
     self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
     n = self._repo.commit(text, user, date)
     if not n:
         return self._repo['.'].hex() # different version checked out
     return node.hex(n)
Example #57
 def _writerecordsv1(self, records):
     """Write current state on disk in a version 1 file"""
     f = self._repo.vfs(self.statepathv1, 'w')
     irecords = iter(records)
     lrecords = irecords.next()
     assert lrecords[0] == 'L'
     f.write(hex(self._local) + '\n')
     for rtype, data in irecords:
         if rtype == 'F':
             f.write('%s\n' % _droponode(data))
     f.close()
Example #58
 def write(self):
     if not self.dirty:
         return
     f = self.opener('phaseroots', 'w', atomictemp=True)
     try:
         for phase, roots in enumerate(self.phaseroots):
             for h in roots:
                 f.write('%i %s\n' % (phase, hex(h)))
     finally:
         f.close()
     self.dirty = False