Code example #1
    def recover(self, repo, source, opts):
        """commit working directory using journal metadata"""
        node, user, date, message, parents = self.readlog()
        merge = False

        if not user or not date or not message or not parents[0]:
            raise util.Abort(_("transplant log file is corrupt"))

        parent = parents[0]
        if len(parents) > 1:
            if opts.get("parent"):
                parent = source.lookup(opts["parent"])
                if parent not in parents:
                    raise util.Abort(_("%s is not a parent of %s") % (short(parent), short(node)))
            else:
                merge = True

        extra = {"transplant_source": node}
        wlock = repo.wlock()
        try:
            p1, p2 = repo.dirstate.parents()
            if p1 != parent:
                raise util.Abort(_("working dir not at transplant parent %s") % revlog.hex(parent))
            if merge:
                repo.setparents(p1, parents[1])
            n = repo.commit(message, user, date, extra=extra, editor=self.editor)
            if not n:
                raise util.Abort(_("commit failed"))
            if not merge:
                self.transplants.set(n, node)
            self.unlog()

            return n, node
        finally:
            wlock.release()
Code example #2
File: extdiff2.py Project: bukzor/dotfiles
def snapshot(ui, repo, files, node, tmproot, listsubrepos):
  """snapshot files as of some revision

    if not using snapshot, -I/-X does not work and recursive diff
    in tools like kdiff3 and meld displays too many files.
  """
  dirname = os.path.basename(repo.root)
  if dirname == '':
    dirname = 'root'
  if node is not None:
    dirname = '%s.%s' % (dirname, short(node))
  base = os.path.join(tmproot, dirname)
  os.mkdir(base)

  if node is not None:
    ui.note(_('making snapshot of %d files from rev %s\n') %
            (len(files), short(node)))
  else:
    ui.note(_('making snapshot of %d files from working directory\n') %
            (len(files)))

  if files:
    repo.ui.setconfig('ui', 'archivemeta', False)

    archival.archive(
        repo,
        base,
        node,
        'files',
        match=scmutil.matchfiles(repo, files),
        subrepos=listsubrepos)

  return dirname
Code example #3
def hook(ui, repo, node, hooktype, **kwargs):
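    # changegroup hook: print view/pushlog URLs (and Treeherder/TBPL links for
    # try repositories) for the changesets included in this push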
    repo_name = os.path.basename(repo.root)
    if repo_name not in hgNameToRevURL:
        return 0

    # All changesets from node to "tip" inclusive are part of this push.
    rev = repo.changectx(node).rev()
    tip = repo.changectx('tip').rev()

    num_changes = tip + 1 - rev
    url = 'https://hg.mozilla.org/' + hgNameToRevURL[repo_name]

    if num_changes <= 10:
        plural = 's' if num_changes > 1 else ''
        print 'You can view your change%s at the following URL%s:' % (plural, plural)

        for i in xrange(rev, tip + 1):
            node = short(repo.changectx(i).node())
            print '  %srev/%s' % (url, node)
    else:
        tip_node = short(repo.changectx(tip).node())
        print 'You can view the pushlog for your changes at the following URL:'
        print '  %spushloghtml?changeset=%s' % (url, tip_node)

    # For try repositories, also output a results dashboard url.
    if repo_name in ['try', 'try-comm-central']:
        tip_node = short(repo.changectx(tip).node())
        # TBPL uses alternative names that don't match buildbot or hg.
        tbpl_name = 'Thunderbird-Try' if repo_name == 'try-comm-central' else 'Try'
        print 'You can view the progress of your build at the following URL:'
        print '  https://treeherder.mozilla.org/#/jobs?repo=%s&revision=%s' % (repo_name, tip_node)
        print 'Alternatively, view them on TBPL (soon to be deprecated):'
        print '  https://tbpl.mozilla.org/?tree=%s&rev=%s' % (tbpl_name, tip_node)

    return 0
Code example #4
File: hgk.py Project: c0ns0le/cygwin
def catcommit(ui, repo, n, prefix, ctx=None):
    nlprefix = '\n' + prefix
    if ctx is None:
        ctx = repo.changectx(n)
    (p1, p2) = ctx.parents()
    ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
    if p1: ui.write("parent %s\n" % short(p1.node()))
    if p2: ui.write("parent %s\n" % short(p2.node()))
    date = ctx.date()
    description = ctx.description().replace("\0", "")
    lines = description.splitlines()
    if lines and lines[-1].startswith('committer:'):
        committer = lines[-1].split(': ')[1].rstrip()
    else:
        committer = ctx.user()

    ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
    ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
    ui.write("revision %d\n" % ctx.rev())
    ui.write("branch %s\n\n" % ctx.branch())

    if prefix != "":
        ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
    else:
        ui.write(description + "\n")
    if prefix:
        ui.write('\0')
Code example #5
File: __init__.py Project: davidshepherd7/dotfiles
def startfrom(ui, repo, opts):
    base, dest = 'null', 'tip'
    if opts.get('bookmark'):
        dest = opts.get('bookmark')
    if opts.get('base'):
        base = opts['base']
        if opts.get('bookmark') not in repo:
            dest = base

    basectx = scmutil.revsingle(repo, base)
    destctx = scmutil.revsingle(repo, dest)
    ctx = list(repo.set("""
        last(
          %n::%n and (
             extra(p4changelist) or
             extra(p4fullimportbasechangelist)))""",
             basectx.node(), destctx.node()))
    if ctx:
        ctx = ctx[0]
        startcl = lastcl(ctx)
        ui.note(_('incremental import from changelist: %d, node: %s\n') %
                (startcl, short(ctx.node())))
        if ctx.node() == basectx.node():
            ui.note(_('creating branchpoint, base %s\n') %
                    short(basectx.node()))
            return ctx, startcl, True
        return ctx, startcl, False
    raise error.Abort(_('no valid p4 changelist number.'))
Code example #6
File: histedit.py Project: RayFerr000/PLTL
def movebookmarks(ui, repo, mapping, oldtopmost, newtopmost):
    """Move bookmark from old to newly created node"""
    if not mapping:
        # if nothing got rewritten there is no purpose for this function
        return
    moves = []
    for bk, old in sorted(repo._bookmarks.iteritems()):
        if old == oldtopmost:
            # special case: ensure the bookmark stays on tip.
            #
            # This is arguably a feature and we may only want that for the
            # active bookmark. But the behavior is kept compatible with the old
            # version for now.
            moves.append((bk, newtopmost))
            continue
        base = old
        new = mapping.get(base, None)
        if new is None:
            # nothing to move
            continue
        while not new:
            # base is killed, trying with parent
            base = repo[base].p1().node()
            new = mapping.get(base, (base,))
        moves.append((bk, new[-1]))
    if moves:
        marks = repo._bookmarks
        for mark, new in moves:
            old = marks[mark]
            ui.note(_('histedit: moving bookmarks %s from %s to %s\n')
                    % (mark, node.short(old), node.short(new)))
            marks[mark] = new
        marks.write()
Code example #7
def rhsummary(ui, repo, **opts):
    """output the summary of the repository"""
    # see mercurial/commands.py:tip
    ui.write(':tip: rev node\n')
    tipctx = repo[len(repo) - 1]
    ui.write('%d %s\n' % (tipctx.rev(), tipctx))

    # see mercurial/commands.py:root
    ui.write(':root: path\n')
    ui.write(repo.root + '\n')

    # see mercurial/commands.py:tags
    ui.write(':tags: rev node name\n')
    for t, n in reversed(repo.tagslist()):
        if t in SPECIAL_TAGS:
            continue
        try:
            r = repo.changelog.rev(n)
        except error.LookupError:
            r = -1
        ui.write('%d %s %s\n' % (r, node.short(n), t))

    # see mercurial/commands.py:branches
    def iterbranches():
        for t, n in repo.branchtags().iteritems():
            yield t, n, repo.changelog.rev(n)

    ui.write(':branches: rev node name\n')
    for t, n, r in sorted(iterbranches(), key=lambda e: e[2], reverse=True):
        if repo.lookup(r) in repo.branchheads(t, closed=False):
            ui.write('%d %s %s\n' % (r, node.short(n), t))  # only open branch
Code example #8
def _sanitycheck(ui, nodes, bases):
    """
    Does some basic sanity checking on a packfile with ``nodes`` and ``bases`` (a
    mapping of node->base):

    - Each deltabase must itself be a node elsewhere in the pack
    - There must be no cycles
    """
    failures = 0
    for node in nodes:
        seen = set()
        current = node
        deltabase = bases[current]

        while deltabase != nullid:
            if deltabase not in nodes:
                ui.warn(("Bad entry: %s has an unknown deltabase (%s)\n" %
                        (short(node), short(deltabase))))
                failures += 1
                break

            if deltabase in seen:
                ui.warn(("Bad entry: %s has a cycle (at %s)\n" %
                        (short(node), short(deltabase))))
                failures += 1
                break

            current = deltabase
            seen.add(current)
            deltabase = bases[current]
        # Since ``node`` begins a valid chain, reset/memoize its base to nullid
        # so we don't traverse it again.
        bases[node] = nullid
    return failures
Code example #9
File: bugzilla.py Project: sandeepprasanna/ODOO
    def filter_cset_known_bug_ids(self, node, ids):
        for id in sorted(ids):
            if self.get_bug_comments(id).find(short(node)) != -1:
                self.ui.status(_('bug %d already knows about changeset %s\n') %
                               (id, short(node)))
                ids.discard(id)
        return ids
Code example #10
def _printupdatednode(repo, oldnode, newnodes):
    # newnodes is an iterable of rewrite targets; only report the rewrite when
    # oldnode maps to exactly one new node
    if len(newnodes) == 1:
        newnode = newnodes[0]
        firstline = encoding.trim(
            repo[newnode].description().split("\n")[0], 50, '...')
        repo.ui.status(_("%s -> %s \"%s\"\n") % (
            short(oldnode), short(newnode), firstline))
Code example #11
    def filter_cset_known_bug_ids(self, node, bugs):
        '''filter bug ids that already refer to this changeset from set.'''
        self.run('''select bug_id from longdescs where
                    bug_id in %s and thetext like "%%%s%%"''' %
                 (bzmysql.sql_buglist(bugs.keys()), short(node)))
        for (id,) in self.cursor.fetchall():
            self.ui.status(_('bug %d already knows about changeset %s\n') %
                           (id, short(node)))
            del bugs[id]
Code example #12
File: notify.py Project: carlgao/lenga
    def send(self, node, count, data):
        '''send message.'''

        p = email.Parser.Parser()
        msg = p.parsestr(data)

        def fix_subject():
            '''try to make subject line exist and be useful.'''

            subject = msg['Subject']
            if not subject:
                if count > 1:
                    subject = _('%s: %d new changesets') % (self.root, count)
                else:
                    changes = self.repo.changelog.read(node)
                    s = changes[4].lstrip().split('\n', 1)[0].rstrip()
                    subject = '%s: %s' % (self.root, s)
            maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
            if maxsubject and len(subject) > maxsubject:
                subject = subject[:maxsubject-3] + '...'
            del msg['Subject']
            msg['Subject'] = subject

        def fix_sender():
            '''try to make message have proper sender.'''

            sender = msg['From']
            if not sender:
                sender = self.ui.config('email', 'from') or self.ui.username()
            if '@' not in sender or '@localhost' in sender:
                sender = self.fixmail(sender)
            del msg['From']
            msg['From'] = sender

        msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
        fix_subject()
        fix_sender()

        msg['X-Hg-Notification'] = 'changeset ' + short(node)
        if not msg['Message-Id']:
            msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
                                 (short(node), int(time.time()),
                                  hash(self.repo.root), socket.getfqdn()))
        msg['To'] = ', '.join(self.subs)

        msgtext = msg.as_string(0)
        if self.ui.configbool('notify', 'test', True):
            self.ui.write(msgtext)
            if not msgtext.endswith('\n'):
                self.ui.write('\n')
        else:
            self.ui.status(_('notify: sending %d subscribers %d changes\n') %
                           (len(self.subs), count))
            mail.sendmail(self.ui, util.email(msg['From']),
                          self.subs, msgtext)
Code example #13
File: bugzilla.py Project: Nurb432/plan9front
    def filter_unknown_bug_ids(self, node, ids):
        '''filter bug ids from list that already refer to this changeset.'''

        self.run('''select bug_id from longdescs where
                    bug_id in %s and thetext like "%%%s%%"''' %
                 (buglist(ids), short(node)))
        unknown = set(ids)
        for (id,) in self.cursor.fetchall():
            self.ui.status(_('bug %d already knows about changeset %s\n') %
                           (id, short(node)))
            unknown.discard(id)
        return sorted(unknown)
Code example #14
File: transplant.py Project: motlin/cyg
    def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
                 filter=None):
        '''apply the patch in patchfile to the repository as a transplant'''
        (manifest, user, (time, timezone), files, message) = cl[:5]
        date = "%d %d" % (time, timezone)
        extra = {'transplant_source': node}
        if filter:
            (user, date, message) = self.filter(filter, node, cl, patchfile)

        if log:
            # we don't translate messages inserted into commits
            message += '\n(transplanted from %s)' % nodemod.hex(node)

        self.ui.status(_('applying %s\n') % nodemod.short(node))
        self.ui.note('%s %s\n%s\n' % (user, date, message))

        if not patchfile and not merge:
            raise error.Abort(_('can only omit patchfile if merging'))
        if patchfile:
            try:
                files = set()
                patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
                files = list(files)
            except Exception as inst:
                seriespath = os.path.join(self.path, 'series')
                if os.path.exists(seriespath):
                    os.unlink(seriespath)
                p1 = repo.dirstate.p1()
                p2 = node
                self.log(user, date, message, p1, p2, merge=merge)
                self.ui.write(str(inst) + '\n')
                raise TransplantError(_('fix up the working directory and run '
                                        'hg transplant --continue'))
        else:
            files = None
        if merge:
            p1, p2 = repo.dirstate.parents()
            repo.setparents(p1, node)
            m = match.always(repo.root, '')
        else:
            m = match.exact(repo.root, '', files)

        n = repo.commit(message, user, date, extra=extra, match=m,
                        editor=self.getcommiteditor())
        if not n:
            self.ui.warn(_('skipping emptied changeset %s\n') %
                           nodemod.short(node))
            return None
        if not merge:
            self.transplants.set(n, node)

        return n
Code example #15
File: nautilus-thg.py Project: tdjordan/tortoisegit
    def get_property_pages(self, vfs_files):
        if len(vfs_files) != 1:
            return
        file = vfs_files[0]
        path = self.get_path_for_vfs_file(file)
        if path is None or file.is_directory():
            return
        repo = self.get_repo_for_path(path)
        if repo is None:
            return

        localpath = path[len(repo.root) + 1 :]
        emblem, status = self._get_file_status(repo, localpath)

        # Get the information from Mercurial
        ctx = repo.workingctx().parents()[0]
        try:
            fctx = ctx.filectx(localpath)
            rev = fctx.filelog().linkrev(fctx.filenode())
        except:
            rev = ctx.rev()
        ctx = repo.changectx(rev)
        node = short(ctx.node())
        date = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(ctx.date()[0]))
        parents = "\n".join([short(p.node()) for p in ctx.parents()])
        description = ctx.description()
        user = ctx.user()
        user = gobject.markup_escape_text(user)
        tags = ", ".join(ctx.tags())
        branch = ctx.branch()

        self.property_label = gtk.Label("Mercurial")

        table = gtk.Table(7, 2, False)
        table.set_border_width(5)
        table.set_row_spacings(5)
        table.set_col_spacings(5)

        self.__add_row(table, 0, "<b>Status</b>:", status)
        self.__add_row(table, 1, "<b>Last-Commit-Revision</b>:", str(rev))
        self.__add_row(table, 2, "<b>Last-Commit-Description</b>:", description)
        self.__add_row(table, 3, "<b>Last-Commit-Date</b>:", date)
        self.__add_row(table, 4, "<b>Last-Commit-User</b>:", user)
        if tags:
            self.__add_row(table, 5, "<b>Tags</b>:", tags)
        if branch != "default":
            self.__add_row(table, 6, "<b>Branch</b>:", branch)

        table.show()

        return (nautilus.PropertyPage("MercurialPropertyPage::status", self.property_label, table),)
Code example #16
File: push_printurls.py Project: Nephyrin/bzexport
def hook(ui, repo, node, hooktype, source=None, **kwargs):
    if source in ('pull', 'strip'):
        return 0

    root = ui.config('hgmo', 'repo_root', '/repo/hg/mozilla')

    if not repo.root.startswith(root):
        return 0

    repo_name = repo.root[len(root) + 1:]

    # All changesets from node to "tip" inclusive are part of this push.
    rev = repo.changectx(node).rev()
    tip = repo.changectx('tip').rev()
    tip_node = short(repo.changectx(tip).node())

    num_changes = tip + 1 - rev
    url = 'https://hg.mozilla.org/%s/' % repo_name

    if num_changes <= 10:
        plural = 's' if num_changes > 1 else ''
        print '\nView your change%s here:' % plural

        for i in xrange(rev, tip + 1):
            node = short(repo.changectx(i).node())
            print '  %srev/%s' % (url, node)
    else:
        print '\nView the pushlog for these changes here:'
        print '  %spushloghtml?changeset=%s' % (url, tip_node)

    # For repositories that report CI results to Treeherder, also output a
    # Treeherder url.
    treeherder_repo = ui.config('mozilla', 'treeherder_repo')
    if treeherder_repo:
        treeherder_base_url = 'https://treeherder.mozilla.org'
        print '\nFollow the progress of your build on Treeherder:'
        print '  %s/#/jobs?repo=%s&revision=%s' % (treeherder_base_url,
                                                   treeherder_repo,
                                                   tip_node)
        # if specifying a try build and talos jobs are enabled, suggest that
        # user use compareperf
        if treeherder_repo == 'try':
            msg = repo.changectx(tip).description()
            if ((' -t ' in msg or ' --talos ' in msg) and '-t none' not in msg
                and '--talos none' not in msg):
                print ('\nIt looks like this try push has talos jobs. Compare '
                       'performance against a baseline revision:')
                print ('  %s/perf.html#/comparechooser'
                       '?newProject=try&newRevision=%s' % (
                           treeherder_base_url, tip_node))
    return 0
Code example #17
File: __init__.py Project: Nephyrin/bzexport
def queue_info_string(repo, values):
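    # Summarize the mq state: qparent, qtip and the name of the topmost applied patch.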
    qparent_str = ''
    qtip_str = ''
    try:
        qparent_str = short(repo.lookup('qparent'))
        qtip_str = short(repo.lookup('qtip'))
    except error.RepoLookupError:
        qparent_str = short(repo.lookup('.'))
        qtip_str = qparent_str
    try:
        top_str = repo.mq.applied[-1].name
    except:
        top_str = '(none)'
    return '\nqparent: %s\nqtip: %s\ntop: %s' % (qparent_str, qtip_str, top_str)
Code example #18
    def get_property_pages(self, vfs_files):
        if len(vfs_files) != 1:
            return
        file = vfs_files[0]
        path = self.get_path_for_vfs_file(file)
        if path is None or file.is_directory():
            return
        repo = self.get_repo_for_path(path)
        if repo is None:
            return
        localpath = path[len(repo.root)+1:]
        emblem, status = self._get_file_status(path, repo)

        # Get the information from Mercurial
        ctx = repo['.']
        try:
            fctx = ctx.filectx(localpath)
            rev = fctx.filelog().linkrev(fctx.filerev())
        except:
            rev = ctx.rev()
        ctx = repo.changectx(rev)
        node = short(ctx.node())
        date = util.datestr(ctx.date(), '%Y-%m-%d %H:%M:%S %1%2')
        parents = '\n'.join([short(p.node()) for p in ctx.parents()])
        description = ctx.description()
        user = ctx.user()
        user = markup_escape_text(user)
        tags = ', '.join(ctx.tags())
        branch = ctx.branch()

        self.property_label = gtk.Label('Mercurial')

        self.table = gtk.Table(7, 2, False)
        self.table.set_border_width(5)
        self.table.set_row_spacings(5)
        self.table.set_col_spacings(5)

        self.__add_row(0, '<b>Status</b>:', status)
        self.__add_row(1, '<b>Last-Commit-Revision</b>:', str(rev))
        self.__add_row(2, '<b>Last-Commit-Description</b>:', description)
        self.__add_row(3, '<b>Last-Commit-Date</b>:', date)
        self.__add_row(4, '<b>Last-Commit-User</b>:', user)
        if tags:
            self.__add_row(5, '<b>Tags</b>:', tags)
        if branch != 'default':
            self.__add_row(6, '<b>Branch</b>:', branch)

        self.table.show()
        return caja.PropertyPage("MercurialPropertyPage::status",
                                     self.property_label, self.table),
Code example #19
def debugindex(orig, ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    if (opts.get('changelog') or
        opts.get('manifest') or
        opts.get('dir') or
        not shallowrepo.requirement in repo.requirements or
        not repo.shallowmatch(file_)):
        return orig(ui, repo, file_, **opts)

    r = buildtemprevlog(repo, file_)

    # debugindex like normal
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = '  base'

    if format == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                  " nodeid       p1           p2\n"))
    elif format == 1:
        ui.write(("   rev flag   offset   length"
                  "     size " + basehdr + "   link     p1     p2"
                  "       nodeid\n"))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    short(node), short(pp[0]), short(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], short(node)))
Code example #20
File: hg.py Project: pombreda/bob
    def getTip(self):
        hg_ui = ui.ui()
        if hasattr(hg, 'peer'):
            repo = hg.peer(hg_ui, {}, self.uri)
        else:
            repo = hg.repository(hg_ui, self.uri)
        return short(repo.heads()[0])
Code example #21
File: webcommands.py Project: Nurb432/plan9front
def archive(web, req, tmpl):
    type_ = req.form.get('type', [None])[0]
    allowed = web.configlist("web", "allow_archive")
    key = req.form['node'][0]

    if type_ not in web.archives:
        msg = 'Unsupported archive type: %s' % type_
        raise ErrorResponse(HTTP_NOT_FOUND, msg)

    if not ((type_ in allowed or
        web.configbool("web", "allow" + type_, False))):
        msg = 'Archive type not allowed: %s' % type_
        raise ErrorResponse(HTTP_FORBIDDEN, msg)

    reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
    cnode = web.repo.lookup(key)
    arch_version = key
    if cnode == key or key == 'tip':
        arch_version = short(cnode)
    name = "%s-%s" % (reponame, arch_version)
    mimetype, artype, extension, encoding = web.archive_specs[type_]
    headers = [
        ('Content-Type', mimetype),
        ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
    ]
    if encoding:
        headers.append(('Content-Encoding', encoding))
    req.header(headers)
    req.respond(HTTP_OK)
    archival.archive(web.repo, req, cnode, artype, prefix=name)
    return []
Code example #22
File: basestore.py Project: html-shell/mozilla-build
    def verify(self, revs, contents=False):
        '''Verify the existence (and, optionally, contents) of every big
        file revision referenced by every changeset in revs.
        Return 0 if all is well, non-zero on any errors.'''
        failed = False

        self.ui.status(
            _('searching %d changesets for largefiles\n') % len(revs))
        verified = set()  # set of (filename, filenode) tuples

        for rev in revs:
            cctx = self.repo[rev]
            cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))

            for standin in cctx:
                if self._verifyfile(cctx, cset, contents, standin, verified):
                    failed = True

        numrevs = len(verified)
        numlfiles = len(set([fname for (fname, fnode) in verified]))
        if contents:
            self.ui.status(
                _('verified contents of %d revisions of %d largefiles\n') %
                (numrevs, numlfiles))
        else:
            self.ui.status(
                _('verified existence of %d revisions of %d largefiles\n') %
                (numrevs, numlfiles))
        return int(failed)
Code example #23
File: hgk.py Project: Distrotech/mercurial
def catcommit(ui, repo, n, prefix, ctx=None):
    nlprefix = '\n' + prefix
    if ctx is None:
        ctx = repo[n]
    # use ctx.node() instead ??
    ui.write(("tree %s\n" % short(ctx.changeset()[0])))
    for p in ctx.parents():
        ui.write(("parent %s\n" % p))

    date = ctx.date()
    description = ctx.description().replace("\0", "")
    ui.write(("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1])))

    if 'committer' in ctx.extra():
        ui.write(("committer %s\n" % ctx.extra()['committer']))

    ui.write(("revision %d\n" % ctx.rev()))
    ui.write(("branch %s\n" % ctx.branch()))
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        if ctx.obsolete():
            ui.write(("obsolete\n"))
    ui.write(("phase %s\n\n" % ctx.phasestr()))

    if prefix != "":
        ui.write("%s%s\n" % (prefix,
                             description.replace('\n', nlprefix).strip()))
    else:
        ui.write(description + "\n")
    if prefix:
        ui.write('\0')
Code example #24
File: transplant.py Project: spraints/for-example
    def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
                 filter=None):
        '''apply the patch in patchfile to the repository as a transplant'''
        (manifest, user, (time, timezone), files, message) = cl[:5]
        date = "%d %d" % (time, timezone)
        extra = {'transplant_source': node}
        if filter:
            (user, date, message) = self.filter(filter, node, cl, patchfile)

        if log:
            # we don't translate messages inserted into commits
            message += '\n(transplanted from %s)' % revlog.hex(node)

        self.ui.status(_('applying %s\n') % short(node))
        self.ui.note('%s %s\n%s\n' % (user, date, message))

        if not patchfile and not merge:
            raise util.Abort(_('can only omit patchfile if merging'))
        if patchfile:
            try:
                files = set()
                patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
                files = list(files)
            except Exception, inst:
                seriespath = os.path.join(self.path, 'series')
                if os.path.exists(seriespath):
                    os.unlink(seriespath)
                p1 = repo.dirstate.p1()
                p2 = node
                self.log(user, date, message, p1, p2, merge=merge)
                self.ui.write(str(inst) + '\n')
                raise TransplantError(_('fix up the merge and run '
                                        'hg transplant --continue'))
Code example #25
def sigwalk(repo):
    """
    walk over every sig, yielding a pair
    ((node, version, sig), (filename, linenumber))
    """

    def parsefile(fileiter, context):
        ln = 1
        for l in fileiter:
            if not l:
                continue
            yield (l.split(" ", 2), (context, ln))
            ln += 1

    # read the heads
    fl = repo.file(".hgsigs")
    for r in reversed(fl.heads()):
        fn = ".hgsigs|%s" % hgnode.short(r)
        for item in parsefile(fl.read(r).splitlines(), fn):
            yield item
    try:
        # read local signatures
        fn = "localsigs"
        for item in parsefile(repo.opener(fn), fn):
            yield item
    except IOError:
        pass
Code example #26
File: __init__.py Project: davidshepherd7/dotfiles
    def _showchanges(self, alines, blines, chunk, fixups):
        ui = self.ui

        def label(line, label):
            if line.endswith('\n'):
                line = line[:-1]
            return ui.label(line, label)

        # this is not optimized for perf but _showchanges only gets executed
        # with an extra command-line flag.
        a1, a2, b1, b2 = chunk
        aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
        for idx, fa1, fa2, fb1, fb2 in fixups:
            for i in xrange(fa1, fa2):
                aidxs[i - a1] = (max(idx, 1) - 1) // 2
            for i in xrange(fb1, fb2):
                bidxs[i - b1] = (max(idx, 1) - 1) // 2

        buf = [] # [(idx, content)]
        buf.append((0, label('@@ -%d,%d +%d,%d @@'
                             % (a1, a2 - a1, b1, b2 - b1), 'diff.hunk')))
        buf += [(aidxs[i - a1], label('-' + alines[i], 'diff.deleted'))
                for i in xrange(a1, a2)]
        buf += [(bidxs[i - b1], label('+' + blines[i], 'diff.inserted'))
                for i in xrange(b1, b2)]
        for idx, line in buf:
            shortnode = idx and node.short(self.fctxs[idx].node()) or ''
            ui.write(ui.label(shortnode[0:7].ljust(8), 'absorb.node') +
                     line + '\n')
Code example #27
def rhentries(ui, repo, path='', **opts):
    """output the entries of the specified directory"""
    rev = opts.get('rev')
    pathprefix = (path.rstrip('/') + '/').lstrip('/')

    # TODO: clean up
    dirs, files = {}, {}
    mf = repo[rev].manifest()
    for f in repo[rev]:
        if not f.startswith(pathprefix):
            continue

        name = re.sub(r'/.*', '', f[len(pathprefix):])
        if '/' in f[len(pathprefix):]:
            dirs[name] = (name,)
        else:
            try:
                fctx = repo.filectx(f, fileid=mf[f])
                ctx = fctx.changectx()
                tm, tzoffset = ctx.date()
                localtime = int(tm) + tzoffset - time.timezone
                files[name] = (ctx.rev(), node.short(ctx.node()), localtime,
                               fctx.size(), name)
            except LookupError:  # TODO: when does this occur?
                pass

    ui.write(':dirs: name\n')
    for n, v in sorted(dirs.iteritems(), key=lambda e: e[0]):
        ui.write(' '.join(v) + '\n')

    ui.write(':files: rev node time size name\n')
    for n, v in sorted(files.iteritems(), key=lambda e: e[0]):
        ui.write(' '.join(str(e) for e in v) + '\n')
Code example #28
    def update(self):
        self.saveSettings()
        cmdline = ['rupdate']

        if self.discard_chk.isChecked():
            cmdline.append('--clean')
        if self.push_chk.isChecked():
            cmdline.append('--push')
        if self.newbranch_chk.isChecked():
            cmdline.append('--new-branch')
        if self.force_chk.isChecked():
            cmdline.append('--force')

        dest = hglib.fromunicode(self.path_combo.currentText())
        cmdline.append('-d')
        cmdline.append(dest)

        # Refer to the revision by the short hash.
        rev = hglib.fromunicode(self.rev_combo.currentText())
        revShortHash = node.short(self.repo[rev].node())
        cmdline.append(revShortHash)

        # start updating
        self.repo.incrementBusyCount()
        self.cmd.run(cmdline)
Code example #29
File: histedit.py Project: RayFerr000/PLTL
    def run(self):
        rulectx = self.repo[self.node]
        if rulectx.parents()[0].node() == self.state.parentctxnode:
            self.repo.ui.debug('node %s unchanged\n' % node.short(self.node))
            return rulectx, []

        return super(pick, self).run()
Code example #30
File: redminehelper.py Project: 101studio/redmine
def _manifest(ui, repo, path, rev):
    ctx = repo.changectx(rev)
    ui.write('<manifest revision="%d" path="%s">\n'
             % (ctx.rev(), _u(path)))

    known = set()
    pathprefix = (path.rstrip('/') + '/').lstrip('/')
    for f, n in sorted(ctx.manifest().iteritems(), key=lambda e: e[0]):
        if not f.startswith(pathprefix):
            continue
        name = re.sub(r'/.*', '/', f[len(pathprefix):])
        if name in known:
            continue
        known.add(name)

        if name.endswith('/'):
            ui.write('<dir name="%s"/>\n'
                     % _x(urllib.quote(name[:-1])))
        else:
            fctx = repo.filectx(f, fileid=n)
            tm, tzoffset = fctx.date()
            ui.write('<file name="%s" revision="%d" node="%s" '
                     'time="%d" size="%d"/>\n'
                     % (_u(name), fctx.rev(), _x(node.short(fctx.node())),
                        tm, fctx.size(), ))

    ui.write('</manifest>\n')
Code example #31
File: redminehelper.py Project: kentana/heroku
def _tags(ui, repo):
    # see mercurial/commands.py:tags
    for t, n in reversed(repo.tagslist()):
        if t in _SPECIAL_TAGS:
            continue
        try:
            r = repo.changelog.rev(n)
        except error.LookupError:
            continue
        ui.write('<tag revision="%d" node="%s" name="%s"/>\n' %
                 (r, _x(node.short(n)), _x(t)))
Code example #32
def _tip(ui, repo):
    # see mercurial/commands.py:tip
    def tiprev():
        try:
            return len(repo) - 1
        except TypeError:  # Mercurial < 1.1
            return repo.changelog.count() - 1

    tipctx = repo.changectx(tiprev())
    ui.write('<tip revision="%d" node="%s"/>\n' %
             (tipctx.rev(), _x(node.short(tipctx.node()))))
Code example #33
File: __init__.py Project: edunham/vcttools-cinnabar
    def compute_patch_name(action, force_name, node=None, revisions=None):
        if force_name:
            if name_used[0]:
                raise util.Abort('option "-n" not valid when backing out multiple changes')
            name_used[0] = True
            return force_name
        else:
            if node:
                return '%s-%s' % (action, short(node))
            else:
                return '%s-%d-changesets' % (action, len(revisions))
Code example #34
File: sync.py Project: noman798/system
    def commitmerge(self):
        message = (cmdutil.logmessage(self.ui, self.opts) or
                   ('Automated merge with %s' %
                    urllib2.unquote(util.removeauth(self.remoterepository.url()))))
        editor = cmdutil.commiteditor
        if self.opts.get('edit'):
            editor = cmdutil.commitforceeditor
        n = self.repo.commit(message, self.opts['user'], self.opts['date'], editor=editor)
        self.ui.status(_('new changeset %d:%s merges remote changes '
                         'with local\n') % (self.repo.changelog.rev(n),
                                            short(n)))
Code example #35
File: hgk.py Project: xunfeng1980/intellij-community
    def __difftree(repo, node1, node2, files=None):
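        # Emit git diff-tree style raw lines (modes, short hashes, status,
        # filename) for the files that differ between node1 and node2.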
        assert node2 is not None
        if files is None:
            files = []
        mmap = repo[node1].manifest()
        mmap2 = repo[node2].manifest()
        m = scmutil.match(repo[node1], files)
        st = repo.status(node1, node2, m)
        empty = short(repo.nullid)

        for f in st.modified:
            # TODO get file permissions
            ui.writenoi18n(b":100664 100664 %s %s M\t%s\t%s\n" %
                           (short(mmap[f]), short(mmap2[f]), f, f))
        for f in st.added:
            ui.writenoi18n(b":000000 100664 %s %s N\t%s\t%s\n" %
                           (empty, short(mmap2[f]), f, f))
        for f in st.removed:
            ui.writenoi18n(b":100664 000000 %s %s D\t%s\t%s\n" %
                           (short(mmap[f]), empty, f, f))
Code example #36
File: config.py Project: stahta01/wxGlade
def get_hg_version():
    "Query the local hg repository to get the current release or return None"
    try:
        from mercurial.hg import repository
        from mercurial.ui import ui
        from mercurial.node import short
        from mercurial.error import RepoError
    except:
        return None

    # try to open local hg repository
    try:
        repo = repository(ui(), os.path.dirname(__file__))
    except RepoError:
        # no mercurial repository found
        return None

    release = ''
    context = repo[None]
    parents = context.parents()
    repo_changed = context.files() + context.deleted()
    if len(parents) == 1 and not repo_changed:
        # release tag isn't at tip it's -2 (one below tip)
        parents = parents[0].parents()
        node = parents[0].node()
        tags = repo.nodetags(node)
        # look for the special 'rel_X_X_X' or 'rel_X_X' tag
        for tag in tags:
            if tag.startswith('rel_') and len(tag) > 4:
                release = tag[4:].replace('_', '.')
                break
        # handle untagged release e.g. tip
        if not release:
            release = short(node)
    else:
        release = '%s' % '+'.join([short(p.node()) for p in parents])

    suffix_changed = repo_changed and '+' or ''

    ver = '%s%s' % (release, suffix_changed)
    return ver
Code example #37
File: extdiff.py Project: michalliu/MyCygwin
def snapshot(ui, repo, files, node, tmproot):
    '''snapshot files as of some revision
    if not using snapshot, -I/-X does not work and recursive diff
    in tools like kdiff3 and meld displays too many files.'''
    dirname = os.path.basename(repo.root)
    if dirname == "":
        dirname = "root"
    if node is not None:
        dirname = '%s.%s' % (dirname, short(node))
    base = os.path.join(tmproot, dirname)
    os.mkdir(base)
    if node is not None:
        ui.note(
            _('making snapshot of %d files from rev %s\n') %
            (len(files), short(node)))
    else:
        ui.note(
            _('making snapshot of %d files from working directory\n') %
            (len(files)))
    wopener = scmutil.opener(base)
    fns_and_mtime = []
    ctx = repo[node]
    for fn in sorted(files):
        wfn = util.pconvert(fn)
        if wfn not in ctx:
            # File doesn't exist; could be a bogus modify
            continue
        ui.note('  %s\n' % wfn)
        dest = os.path.join(base, wfn)
        fctx = ctx[wfn]
        data = repo.wwritedata(wfn, fctx.data())
        if 'l' in fctx.flags():
            wopener.symlink(data, wfn)
        else:
            wopener.write(wfn, data)
            if 'x' in fctx.flags():
                util.setflags(dest, False, True)
        if node is None:
            fns_and_mtime.append(
                (dest, repo.wjoin(fn), os.lstat(dest).st_mtime))
    return dirname, fns_and_mtime
Code example #38
def snapshot(ui, repo, files, node, tmproot, listsubrepos):
    '''snapshot files as of some revision
    if not using snapshot, -I/-X does not work and recursive diff
    in tools like kdiff3 and meld displays too many files.'''
    dirname = os.path.basename(repo.root)
    if dirname == "":
        dirname = "root"
    if node is not None:
        dirname = '%s.%s' % (dirname, short(node))
    base = os.path.join(tmproot, dirname)
    os.mkdir(base)
    fnsandstat = []

    if node is not None:
        ui.note(
            _('making snapshot of %d files from rev %s\n') %
            (len(files), short(node)))
    else:
        ui.note(
            _('making snapshot of %d files from working directory\n') %
            (len(files)))

    if files:
        repo.ui.setconfig("ui", "archivemeta", False)

        archival.archive(repo,
                         base,
                         node,
                         'files',
                         matchfn=scmutil.matchfiles(repo, files),
                         subrepos=listsubrepos)

        for fn in sorted(files):
            wfn = util.pconvert(fn)
            ui.note('  %s\n' % wfn)

            if node is None:
                dest = os.path.join(base, wfn)

                fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
    return dirname, fnsandstat
Code example #39
File: hg-rheads.py Project: kfirprods/tpp
def main():
    """print (possibly remote) heads

    Prints a series of lines consisting of hashes and branch names.
    Specify a local or remote repository, defaulting to the configured remote.
    """
    repo = sys.argv[1]

    other = hg.peer(ui.ui(), {}, repo)

    for tag, heads in other.branchmap().iteritems():
        print "%s %s" % (node.short(heads[0]), tag)
Code example #40
    def resume(self, repo, source, opts):
        '''recover last transaction and apply remaining changesets'''
        if os.path.exists(os.path.join(self.path, 'journal')):
            n, node = self.recover(repo, source, opts)
            if n:
                self.ui.status(
                    _('%s transplanted as %s\n') % (short(node), short(n)))
            else:
                self.ui.status(
                    _('%s skipped due to empty diff\n') % (short(node), ))
        seriespath = os.path.join(self.path, 'series')
        if not os.path.exists(seriespath):
            self.transplants.write()
            return
        nodes, merges = self.readseries()
        revmap = {}
        for n in nodes:
            revmap[source.changelog.rev(n)] = n
        os.unlink(seriespath)

        self.apply(repo, source, revmap, merges, opts)
Code example #41
def backlog(ui, repo, *changesets, **opts):
    """show the backlog (draft changesets) of specified committer in the form
    of a review list.
    """
    revs = repo.revs('draft()')
    ctxhexs = (node.short(repo.lookup(rev)) for rev in revs)

    committer = opts.get('committer', None)

    with build_proxy(ui, opts) as client:
        rev = show_review(client, ctxhexs, committer)
        _format_review_result(ui, repo, client, rev)
Code example #42
File: sync.py Project: noman798/system
    def merge(self, initialrevision, working, other):
        self.ui.status(_('merging with %d:%s\n') %
                  (self.repo.changelog.rev(other), short(other)))

        if working != initialrevision: self.updateClean(working)

        if hg.merge(self.repo, other, remind=False):
            if not self.ui.promptchoice(_('Merge failed, do you want to revert [Y/n]: '), ['&Yes', '&No']):
                self.updateClean(initialrevision)
                raise util.Abort('merge failed and reverted, please merge remaining heads manually and sync again')
            else:
                raise util.Abort('merge failed, please resolve remaining merge conflicts manually, commit and sync again')
Code example #43
def _bundle(repo, bases, heads, node, suffix, compress=True):
    """create a bundle with the specified revisions as a backup"""
    cg = repo.changegroupsubset(bases, heads, 'strip')
    backupdir = repo.join("strip-backup")
    if not os.path.isdir(backupdir):
        os.mkdir(backupdir)
    name = os.path.join(backupdir, "%s-%s.hg" % (short(node), suffix))
    if compress:
        bundletype = "HG10BZ"
    else:
        bundletype = "HG10UN"
    return changegroup.writebundle(cg, name, bundletype)
Code example #44
def forbidnewline(ui, repo, hooktype, node, newline, **kwargs):
    halt = False
    seen = set()
    # we try to walk changesets in reverse order from newest to
    # oldest, so that if we see a file multiple times, we take the
    # newest version as canonical. this prevents us from blocking a
    # changegroup that contains an unacceptable commit followed later
    # by a commit that fixes the problem.
    tip = repo[b'tip']
    for rev in pycompat.xrange(
        repo.changelog.tiprev(), repo[node].rev() - 1, -1
    ):
        c = repo[rev]
        for f in c.files():
            if f in seen or f not in tip or f not in c:
                continue
            seen.add(f)
            data = c[f].data()
            if not stringutil.binary(data) and newline in data:
                if not halt:
                    ui.warn(
                        _(
                            b'attempt to commit or push text file(s) '
                            b'using %s line endings\n'
                        )
                        % newlinestr[newline]
                    )
                ui.warn(_(b'in %s: %s\n') % (short(c.node()), f))
                halt = True
    if halt and hooktype == b'pretxnchangegroup':
        crlf = newlinestr[newline].lower()
        filter = filterstr[newline]
        ui.warn(
            _(
                b'\nTo prevent this mistake in your local repository,\n'
                b'add to Mercurial.ini or .hg/hgrc:\n'
                b'\n'
                b'[hooks]\n'
                b'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
                b'\n'
                b'and also consider adding:\n'
                b'\n'
                b'[extensions]\n'
                b'win32text =\n'
                b'[encode]\n'
                b'** = %sencode:\n'
                b'[decode]\n'
                b'** = %sdecode:\n'
            )
            % (crlf, crlf, filter, filter)
        )
    return halt
Code example #45
    def check(self, ctx):
        if self.is_uplift:
            return True

        # Ignore merge changesets
        if len(ctx.parents()) > 1:
            return True

        # Ignore backouts
        if is_backout(ctx.description()):
            return True

        # Ignore changes that don't touch .webidl files
        webidl_files = [f for f in ctx.files() if f.endswith('.webidl')]
        if not webidl_files:
            return True

        # Allow patches authored by peers
        if is_peer_email(util.email(ctx.user())):
            return True

        # Categorise files
        file_counts = collections.Counter()
        review_required_files = []
        for f in webidl_files:
            file_counts['total'] += 1
            if f.startswith(CHROME_WEBIDL_ROOT):
                file_counts['chrome'] += 1
            elif f.startswith(SERVO_ROOT):
                file_counts['servo'] += 1
            else:
                review_required_files.append(f)

        # Allow chrome-only and servo-only changes
        if file_counts['chrome'] + file_counts['servo'] == file_counts[
                'total']:
            if file_counts['chrome']:
                print_notice(self.ui, CHROME_ONLY)
            if file_counts['servo']:
                print_notice(self.ui, SERVO_ONLY)
            return True

        # Allow if reviewed by any peer
        requal = list(parse_requal_reviewers(ctx.description()))
        if any(is_peer_nick(nick) for nick in requal):
            return True

        # Reject
        print_banner(
            self.ui, 'error', MISSING_REVIEW %
            (short(ctx.node()), '\n'.join(review_required_files)))
        return False
Code example #46
File: absorb.py Project: dothq/mozillabuild
 def _checkoutlinelogwithedits(self):
     """() -> [str]. prompt all lines for edit"""
     alllines = self.linelog.getalllines()
     # header
     editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
                     'exists in the changeset to the top\nHG:\n') %
                   self.fctxs[-1].path())
     # [(idx, fctx)]. hide the dummy emptyfilecontext
     visiblefctxs = [(i, f) for i, f in enumerate(self.fctxs)
                     if not isinstance(f, emptyfilecontext)]
     for i, (j, f) in enumerate(visiblefctxs):
         editortext += (_('HG: %s/%s %s %s\n') %
                        ('|' * i, '-' *
                         (len(visiblefctxs) - i + 1), node.short(
                             f.node()), f.description().split('\n', 1)[0]))
     editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
     # figure out the lifetime of a line, this is relatively inefficient,
     # but probably fine
     lineset = defaultdict(lambda: set())  # {(llrev, linenum): {llrev}}
     for i, f in visiblefctxs:
         self.linelog.annotate((i + 1) * 2)
         for l in self.linelog.annotateresult:
             lineset[l].add(i)
     # append lines
     for l in alllines:
         editortext += (
             '    %s : %s' %
             (''.join([('y' if i in lineset[l] else ' ')
                       for i, _f in visiblefctxs]), self._getline(l)))
     # run editor
     editedtext = self.ui.edit(editortext, '', action='absorb')
     if not editedtext:
         raise error.Abort(_('empty editor text'))
     # parse edited result
     contents = ['' for i in self.fctxs]
     leftpadpos = 4
     colonpos = leftpadpos + len(visiblefctxs) + 1
     for l in mdiff.splitnewlines(editedtext):
         if l.startswith('HG:'):
             continue
         if l[colonpos - 1:colonpos + 2] != ' : ':
             raise error.Abort(_('malformed line: %s') % l)
         linecontent = l[colonpos + 2:]
         for i, ch in enumerate(pycompat.bytestr(l[leftpadpos:colonpos -
                                                   1])):
             if ch == 'y':
                 contents[visiblefctxs[i][0]] += linecontent
     # chunkstats is hard to calculate if anything changes, therefore
     # set them to just a simple value (1, 1).
     if editedtext != editortext:
         self.chunkstats = [1, 1]
     return contents
Code example #47
def sigcheck(ui, repo, rev):
    """verify all the signatures there may be for a particular revision"""
    mygpg = newgpg(ui)
    rev = repo.lookup(rev)
    hexrev = hgnode.hex(rev)
    keys = []

    for data, context in sigwalk(repo):
        node, version, sig = data
        if node == hexrev:
            k = getkeys(ui, repo, mygpg, data, context)
            if k:
                keys.extend(k)

    if not keys:
        ui.write(_(b"no valid signature for %s\n") % hgnode.short(rev))
        return

    # print summary
    ui.write(_(b"%s is signed by:\n") % hgnode.short(rev))
    for key in keys:
        ui.write(b" %s\n" % keystr(ui, key))
Code example #48
    def filter_cset_known_bug_ids(self, node, bugs):
        '''remove bug IDs where node occurs in comment text from bugs.'''
        sn = short(node)
        for bugid in list(bugs.keys()):
            burl = self.apiurl(('bug', bugid, 'comment'),
                               include_fields='text')
            result = self._fetch(burl)
            comments = result['bugs'][str(bugid)]['comments']
            if any(sn in c['text'] for c in comments):
                self.ui.status(
                    _('bug %d already knows about changeset %s\n') %
                    (bugid, sn))
                del bugs[bugid]
Code example #49
File: repair.py Project: zeroincombenze/VME
def _bundle(repo, bases, heads, node, suffix, compress=True):
    """create a bundle with the specified revisions as a backup"""
    cg = changegroup.changegroupsubset(repo, bases, heads, 'strip')
    backupdir = "strip-backup"
    vfs = repo.vfs
    if not vfs.isdir(backupdir):
        vfs.mkdir(backupdir)
    name = "%s/%s-%s.hg" % (backupdir, short(node), suffix)
    if compress:
        bundletype = "HG10BZ"
    else:
        bundletype = "HG10UN"
    return changegroup.writebundle(cg, name, bundletype, vfs)
Code example #50
def tasks(ui, repo, *changesets, **opts):
    """show tasks related to the given revision.

    By default, the revision used is the parent of the working
    directory: use -r/--rev to specify a different revision.

    By default, the forge url used is https://www.cubicweb.org/. Use
    -U/--endpoint to specify a different cwclientlib endpoint. The
    endpoint id of the forge can be permanently defined in one of
    the mercurial configuration files::

      [jpl]
      endpoint = https://www.cubicweb.org/

    By default, done tasks are not displayed: use -a/--all to not filter
    tasks and display all.

    """
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        changesets = ('.')
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise util.Abort(
            _('no working directory or revision not found: '
              'please specify a known revision'))
    # we need to see hidden cs from here
    repo = repo.unfiltered()

    for rev in revs:
        precs = scmutil.revrange(repo, (rev, 'allprecursors(%s)' % rev))
        ctxhexs = list((node.short(repo.lookup(lrev)) for lrev in precs))
        showall = opts.get('all', None)
        with build_proxy(ui, opts) as client:
            try:
                print_tasks(client, ui, ctxhexs, showall=showall)
            except Exception as e:
                ui.write('no patch or no tasks for %s\n' %
                         node.short(repo.lookup(rev)))
Code example #51
File: histedit.py Project: CSCI-362-02-2015/RedTeam
    def continueclean(self):
        """Continues the action when the working copy is clean. The default
        behavior is to accept the current commit as the new version of the
        rulectx."""
        ctx = self.repo['.']
        if ctx.node() == self.state.parentctxnode:
            self.repo.ui.warn(_('%s: empty changeset\n') %
                              node.short(self.node))
            return ctx, [(self.node, tuple())]
        if ctx.node() == self.node:
            # Nothing changed
            return ctx, []
        return ctx, [(self.node, (ctx.node(),))]
Code example #52
    def recover(self, repo, source, opts):
        '''commit working directory using journal metadata'''
        node, user, date, message, parents = self.readlog()
        merge = False

        if not user or not date or not message or not parents[0]:
            raise util.Abort(_('transplant log file is corrupt'))

        parent = parents[0]
        if len(parents) > 1:
            if opts.get('parent'):
                parent = source.lookup(opts['parent'])
                if parent not in parents:
                    raise util.Abort(_('%s is not a parent of %s') %
                                     (short(parent), short(node)))
            else:
                merge = True

        extra = {'transplant_source': node}
        wlock = repo.wlock()
        try:
            p1, p2 = repo.dirstate.parents()
            if p1 != parent:
                raise util.Abort(
                    _('working dir not at transplant parent %s') %
                                 revlog.hex(parent))
            if merge:
                repo.setparents(p1, parents[1])
            n = repo.commit(message, user, date, extra=extra,
                            editor=self.editor)
            if not n:
                raise util.Abort(_('commit failed'))
            if not merge:
                self.transplants.set(n, node)
            self.unlog()

            return n, node
        finally:
            wlock.release()
Code example #53
def graph(web, req, tmpl):

    rev = webutil.changectx(web.repo, req).rev()
    bg_height = 39
    revcount = web.maxshortchanges
    if 'revcount' in req.form:
        revcount = int(req.form.get('revcount', [revcount])[0])
        tmpl.defaults['sessionvars']['revcount'] = revcount

    lessvars = copy.copy(tmpl.defaults['sessionvars'])
    lessvars['revcount'] = revcount / 2
    morevars = copy.copy(tmpl.defaults['sessionvars'])
    morevars['revcount'] = revcount * 2

    max_rev = len(web.repo) - 1
    revcount = min(max_rev, revcount)
    revnode = web.repo.changelog.node(rev)
    revnode_hex = hex(revnode)
    uprev = min(max_rev, rev + revcount)
    downrev = max(0, rev - revcount)
    count = len(web.repo)
    changenav = webutil.revnavgen(rev, revcount, count, web.repo.changectx)

    dag = graphmod.revisions(web.repo, rev, downrev)
    tree = list(graphmod.colored(dag))
    canvasheight = (len(tree) + 1) * bg_height - 27
    data = []
    for (id, type, ctx, vtx, edges) in tree:
        if type != graphmod.CHANGESET:
            continue
        node = short(ctx.node())
        age = templatefilters.age(ctx.date())
        desc = templatefilters.firstline(ctx.description())
        desc = cgi.escape(templatefilters.nonempty(desc))
        user = cgi.escape(templatefilters.person(ctx.user()))
        branch = ctx.branch()
        branch = branch, web.repo.branchtags().get(branch) == ctx.node()
        data.append((node, vtx, edges, desc, user, age, branch, ctx.tags()))

    return tmpl('graph',
                rev=rev,
                revcount=revcount,
                uprev=uprev,
                lessvars=lessvars,
                morevars=morevars,
                downrev=downrev,
                canvasheight=canvasheight,
                jsdata=data,
                bg_height=bg_height,
                node=revnode_hex,
                changenav=changenav)
コード例 #54
0
def _branches(ui, repo):
    # see mercurial/commands.py:branches
    def iterbranches():
        for t, n in repo.branchtags().iteritems():
            yield t, n, repo.changelog.rev(n)
    def branchheads(branch):
        try:
            return repo.branchheads(branch, closed=False)
        except TypeError:  # Mercurial < 1.2
            return repo.branchheads(branch)
    for t, n, r in sorted(iterbranches(), key=lambda e: e[2], reverse=True):
        if repo.lookup(r) in branchheads(t):
            ui.write('<branch revision="%d" node="%s" name="%s"/>\n'
                     % (r, _x(node.short(n)), _x(t)))
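For reference, each branch head is emitted as a single XML element; with hypothetical values the output line looks like:

    <branch revision="42" node="1f0dee641bb7" name="default"/>
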
コード例 #55
0
ファイル: histedit.py プロジェクト: CSCI-362-02-2015/RedTeam
def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
    if isinstance(nodelist, str):
        nodelist = [nodelist]
    if os.path.exists(os.path.join(repo.path, 'histedit-state')):
        state = histeditstate(repo)
        state.read()
        histedit_nodes = set([repo[rulehash].node() for (action, rulehash)
                             in state.rules if rulehash in repo])
        strip_nodes = set([repo[n].node() for n in nodelist])
        common_nodes = histedit_nodes & strip_nodes
        if common_nodes:
            raise error.Abort(_("histedit in progress, can't strip %s")
                             % ', '.join(node.short(x) for x in common_nodes))
    return orig(ui, repo, nodelist, *args, **kwargs)
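For the guard to matter, the wrapper has to be installed in front of the strip machinery. A minimal sketch of how an extension might register it (the extsetup placement is an assumption here, but extensions.wrapfunction is the standard mechanism):

    from mercurial import extensions, repair

    def extsetup(ui):
        # route every strip request through stripwrapper first
        extensions.wrapfunction(repair, 'strip', stripwrapper)
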
コード例 #56
0
ファイル: webcommands.py プロジェクト: gobizen/hg-stable
def archive(web, req, tmpl):
    type_ = req.form.get('type', [None])[0]
    allowed = web.configlist("web", "allow_archive")
    key = req.form['node'][0]

    if type_ not in web.archives:
        msg = 'Unsupported archive type: %s' % type_
        raise ErrorResponse(HTTP_NOT_FOUND, msg)

    if not ((type_ in allowed
             or web.configbool("web", "allow" + type_, False))):
        msg = 'Archive type not allowed: %s' % type_
        raise ErrorResponse(HTTP_FORBIDDEN, msg)

    reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
    cnode = web.repo.lookup(key)
    arch_version = key
    if cnode == key or key == 'tip':
        arch_version = short(cnode)
    name = "%s-%s" % (reponame, arch_version)

    ctx = webutil.changectx(web.repo, req)
    pats = []
    matchfn = None
    file = req.form.get('file', None)
    if file:
        pats = ['path:' + file[0]]
        matchfn = scmutil.match(ctx, pats, default='path')
        if pats:
            files = [f for f in ctx.manifest().keys() if matchfn(f)]
            if not files:
                raise ErrorResponse(HTTP_NOT_FOUND,
                                    'file(s) not found: %s' % file[0])

    mimetype, artype, extension, encoding = web.archive_specs[type_]
    headers = [('Content-Disposition',
                'attachment; filename=%s%s' % (name, extension))]
    if encoding:
        headers.append(('Content-Encoding', encoding))
    req.headers.extend(headers)
    req.respond(HTTP_OK, mimetype)

    archival.archive(web.repo,
                     req,
                     cnode,
                     artype,
                     prefix=name,
                     matchfn=matchfn,
                     subrepos=web.configbool("web", "archivesubrepos"))
    return []
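Which archive types are offered at all is decided by configuration rather than code; a minimal hgrc snippet that allows the three built-in formats might look like:

    [web]
    allow_archive = gz, zip, bz2
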
コード例 #57
0
def hook(ui, repo, hooktype, node, source=None, **kwargs):
    if source in ('pull', 'strip'):
        return 0

    for rev in range(repo[node].rev(), len(repo)):
        ctx = repo[rev]
        if rev == 0:
            continue

        if ctx.p1().node() == nullid:
            ui.write(MESSAGE % short(ctx.node()))  # short() expects a binary node, not its hex form
            return 1

    return 0
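A hook like this only runs once it is wired up in a configuration file; a sketch of registering it as an in-process pretxnchangegroup hook (the hook name and file path are placeholders):

    [hooks]
    pretxnchangegroup.reject_new_root = python:/path/to/this_hook.py:hook

Because pretxnchangegroup hooks run before the transaction is committed, the non-zero return value above causes the incoming changesets to be rejected.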
コード例 #58
0
ファイル: sqlitestore.py プロジェクト: JesseDavids/mqtta
    def verifyintegrity(self, state):
        state[b'skipread'] = set()

        for rev in self:
            node = self.node(rev)

            try:
                self.revision(node)
            except Exception as e:
                yield sqliteproblem(
                    error=_(b'unpacking %s: %s') % (short(node), e), node=node
                )

                state[b'skipread'].add(node)
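A brief sketch of driving this check from verification code, assuming a hypothetical store object and ui handle:

    state = {}
    problems = list(store.verifyintegrity(state))
    ui.write(b'%d unreadable revision(s) found\n' % len(problems))
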
コード例 #59
0
def _docensor(ui, repo, path, rev=b'', tombstone=b'', **opts):
    if not path:
        raise error.Abort(_(b'must specify file path to censor'))
    if not rev:
        raise error.Abort(_(b'must specify revision to censor'))

    wctx = repo[None]

    m = scmutil.match(wctx, (path,))
    if m.anypats() or len(m.files()) != 1:
        raise error.Abort(_(b'can only specify an explicit filename'))
    path = m.files()[0]
    flog = repo.file(path)
    if not len(flog):
        raise error.Abort(_(b'cannot censor file with no history'))

    rev = scmutil.revsingle(repo, rev, rev).rev()
    try:
        ctx = repo[rev]
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)

    try:
        fctx = ctx.filectx(path)
    except error.LookupError:
        raise error.Abort(_(b'file does not exist at revision %s') % rev)

    fnode = fctx.filenode()
    heads = []
    for headnode in repo.heads():
        hc = repo[headnode]
        if path in hc and hc.filenode(path) == fnode:
            heads.append(hc)
    if heads:
        headlist = b', '.join([short(c.node()) for c in heads])
        raise error.Abort(
            _(b'cannot censor file in heads (%s)') % headlist,
            hint=_(b'clean/delete and commit first'),
        )

    wp = wctx.parents()
    if ctx.node() in [p.node() for p in wp]:
        raise error.Abort(
            _(b'cannot censor working directory'),
            hint=_(b'clean/delete/update first'),
        )

    with repo.transaction(b'censor') as tr:
        flog.censorrevision(tr, fnode, tombstone=tombstone)
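This helper backs the censor extension's command line entry point; a typical invocation (revision, tombstone text, and file path are placeholders) looks something like:

    hg censor -r 2100 --tombstone "checked-in secret removed" path/to/file
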
コード例 #60
0
def pointersfromctx(ctx):
    """return a dict {path: pointer} for given single changectx"""
    result = {}
    for f in ctx.files():
        if f not in ctx:
            continue
        fctx = ctx[f]
        if not _islfs(fctx.filelog(), fctx.filenode()):
            continue
        try:
            result[f] = pointer.deserialize(fctx.rawdata())
        except pointer.InvalidPointer as ex:
            raise error.Abort(_('lfs: corrupted pointer (%s@%s): %s\n')
                              % (f, short(ctx.node()), ex))
    return result
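A minimal sketch of using the helper, assuming an already-obtained changectx and ui (hypothetical names here):

    pointers = pointersfromctx(ctx)
    for path in sorted(pointers):
        # each entry maps a tracked path to its deserialized LFS pointer
        ui.write(_('%s is stored as an LFS pointer\n') % path)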