Example #1
def _docreatecmd(ui, repo, pats, opts):
    def mutableancestors(ctx):
        """return all mutable ancestors for ctx (included)

        Much faster than the revset ancestors(ctx) & draft()"""
        seen = set([nodemod.nullrev])
        visit = collections.deque()
        visit.append(ctx)
        while visit:
            ctx = visit.popleft()
            yield ctx.node()
            for parent in ctx.parents():
                rev = parent.rev()
                if rev not in seen:
                    seen.add(rev)
                    if parent.mutable():
                        visit.append(parent)

    wctx = repo[None]
    parents = wctx.parents()
    if len(parents) > 1:
        raise error.Abort(_('cannot shelve while merging'))
    parent = parents[0]
    origbranch = wctx.branch()

    # we never need the user, so we use a generic user for all shelve operations
    user = '******'
    label = repo._activebookmark or parent.branch() or 'default'

    # slashes aren't allowed in filenames, so replace them in the label
    label = label.replace('/', '_')

    def gennames():
        yield label
        for i in xrange(1, 100):
            yield '%s-%02d' % (label, i)

    if parent.node() != nodemod.nullid:
        desc = "changes to: %s" % parent.description().split('\n', 1)[0]
    else:
        desc = '(changes in empty repository)'

    if not opts.get('message'):
        opts['message'] = desc

    name = opts.get('name')

    lock = tr = None
    try:
        lock = repo.lock()

        # use an uncommitted transaction to generate the bundle to avoid
        # pull races. ensure we don't print the abort message to stderr.
        tr = repo.transaction('commit', report=lambda x: None)

        if name:
            if shelvedfile(repo, name, 'hg').exists():
                raise error.Abort(
                    _("a shelved change named '%s' already exists") % name)
        else:
            for n in gennames():
                if not shelvedfile(repo, n, 'hg').exists():
                    name = n
                    break
            else:
                raise error.Abort(
                    _("too many shelved changes named '%s'") % label)

        # ensure we are not creating a subdirectory or a hidden file
        if '/' in name or '\\' in name:
            raise error.Abort(
                _('shelved change names may not contain slashes'))
        if name.startswith('.'):
            raise error.Abort(_("shelved change names may not start with '.'"))
        interactive = opts.get('interactive', False)
        includeunknown = (opts.get('unknown', False)
                          and not opts.get('addremove', False))

        extra = {}
        if includeunknown:
            s = repo.status(match=scmutil.match(repo[None], pats, opts),
                            unknown=True)
            if s.unknown:
                extra['shelve_unknown'] = '\0'.join(s.unknown)
                repo[None].add(s.unknown)

        if _iswctxonnewbranch(repo) and not _isbareshelve(pats, opts):
            # In a non-bare shelve we don't store the newly created branch
            # in the bundled commit
            repo.dirstate.setbranch(repo['.'].branch())

        def commitfunc(ui, repo, message, match, opts):
            hasmq = util.safehasattr(repo, 'mq')
            if hasmq:
                saved, repo.mq.checkapplied = repo.mq.checkapplied, False
            backup = repo.ui.backupconfig('phases', 'new-commit')
            try:
                repo.ui.setconfig('phases', 'new-commit', phases.secret)
                editor = cmdutil.getcommiteditor(editform='shelve.shelve',
                                                 **opts)
                return repo.commit(message,
                                   user,
                                   opts.get('date'),
                                   match,
                                   editor=editor,
                                   extra=extra)
            finally:
                repo.ui.restoreconfig(backup)
                if hasmq:
                    repo.mq.checkapplied = saved

        def interactivecommitfunc(ui, repo, *pats, **opts):
            match = scmutil.match(repo['.'], pats, {})
            message = opts['message']
            return commitfunc(ui, repo, message, match, opts)

        if not interactive:
            node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
        else:
            node = cmdutil.dorecord(ui, repo, interactivecommitfunc, None,
                                    False, cmdutil.recordfilter, *pats, **opts)
        if not node:
            stat = repo.status(match=scmutil.match(repo[None], pats, opts))
            if stat.deleted:
                ui.status(
                    _("nothing changed (%d missing files, see "
                      "'hg status')\n") % len(stat.deleted))
            else:
                ui.status(_("nothing changed\n"))
            return 1

        bases = list(mutableancestors(repo[node]))
        shelvedfile(repo, name, 'hg').writebundle(bases, node)
        cmdutil.export(repo, [node],
                       fp=shelvedfile(repo, name, 'patch').opener('wb'),
                       opts=mdiff.diffopts(git=True))

        if ui.formatted():
            desc = util.ellipsis(desc, ui.termwidth())
        ui.status(_('shelved as %s\n') % name)
        hg.update(repo, parent.node())
        if origbranch != repo['.'].branch() and not _isbareshelve(pats, opts):
            repo.dirstate.setbranch(origbranch)

        _aborttransaction(repo)
    finally:
        lockmod.release(tr, lock)
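
A note on the name-allocation loop above: gennames() caps automatic naming at 100 candidates per label, and the for/else falls through to an abort when all of them are taken. A minimal standalone sketch of that pattern, using a plain set in place of the shelvedfile() existence checks (pickname() and the taken set are hypothetical stand-ins):

def gennames(label):
    yield label
    for i in range(1, 100):
        yield '%s-%02d' % (label, i)

def pickname(label, taken):
    for name in gennames(label):
        if name not in taken:
            return name
    raise RuntimeError("too many shelved changes named '%s'" % label)

print(pickname('default', {'default', 'default-01'}))  # -> default-02
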
Example #2
    def annotate(self, rev, master=None, showpath=False, showlines=False):
        """incrementally update the cache so it includes revisions in the main
        branch till 'master'. and run annotate on 'rev', which may or may not be
        included in the main branch.

        if master is None, do not update linelog.

        the first value returned is the annotate result, it is [(node, linenum)]
        by default. [(node, linenum, path)] if showpath is True.

        if showlines is True, a second value will be returned, it is a list of
        corresponding line contents.
        """

        # the fast path test requires a commit hash; convert a rev number to a
        # hash so it may hit the fast path. note: in the "fctx" mode, the
        # "annotate" command could give us a revision number even if the user
        # passes a commit hash.
        if isinstance(rev, int):
            rev = node.hex(self.repo.changelog.node(rev))

        # fast path: if rev is in the main branch already
        directly, revfctx = self.canannotatedirectly(rev)
        if directly:
            if self.ui.debugflag:
                self.ui.debug('fastannotate: %s: using fast path '
                              '(resolved fctx: %s)\n'
                              % (self.path,
                                 stringutil.pprint(util.safehasattr(revfctx,
                                                                    'node'))))
            return self.annotatedirectly(revfctx, showpath, showlines)

        # resolve master
        masterfctx = None
        if master:
            try:
                masterfctx = self._resolvefctx(master, resolverev=True,
                                               adjustctx=True)
            except LookupError: # master does not have the file
                pass
            else:
                if masterfctx in self.revmap: # no need to update linelog
                    masterfctx = None

        #                  ... - @ <- rev (can be an arbitrary changeset,
        #                 /                not necessarily a descendant
        #      master -> o                 of master)
        #                |
        #     a merge -> o         'o': new changesets in the main branch
        #                |\        '#': revisions in the main branch that
        #                o *            exist in linelog / revmap
        #                | .       '*': changesets in side branches, or
        # last master -> # .            descendants of master
        #                | .
        #                # *       joint: '#', and is a parent of a '*'
        #                |/
        #     a joint -> # ^^^^ --- side branches
        #                |
        #                ^ --- main branch (in linelog)

        # these DFSes are similar to the traditional annotate algorithm.
        # we cannot really reuse the code for performance reasons.

        # 1st DFS calculates merges, joint points, and needed.
        # "needed" is a simple reference-counting dict used to free items in
        # "hist", reducing its memory usage, which could otherwise be huge.
        initvisit = [revfctx]
        if masterfctx:
            if masterfctx.rev() is None:
                raise error.Abort(_('cannot update linelog to wdir()'),
                                  hint=_('set fastannotate.mainbranch'))
            initvisit.append(masterfctx)
        visit = initvisit[:]
        pcache = {}
        needed = {revfctx: 1}
        hist = {} # {fctx: ([(llrev or fctx, linenum)], text)}
        while visit:
            f = visit.pop()
            if f in pcache or f in hist:
                continue
            if f in self.revmap: # in the old main branch, it's a joint
                llrev = self.revmap.hsh2rev(f.node())
                self.linelog.annotate(llrev)
                result = self.linelog.annotateresult
                hist[f] = (result, f.data())
                continue
            pl = self._parentfunc(f)
            pcache[f] = pl
            for p in pl:
                needed[p] = needed.get(p, 0) + 1
                if p not in pcache:
                    visit.append(p)

        # 2nd (simple) DFS calculates new changesets in the main branch
        # ('o' nodes in the above graph), so we know when to update linelog.
        newmainbranch = set()
        f = masterfctx
        while f and f not in self.revmap:
            newmainbranch.add(f)
            pl = pcache[f]
            if pl:
                f = pl[0]
            else:
                f = None
                break

        # f, if present, is the position where the last build stopped, and
        # should have been "master" last time. check to see if we can continue
        # building the linelog incrementally. (we cannot if history diverged)
        if masterfctx is not None:
            self._checklastmasterhead(f)

        if self.ui.debugflag:
            if newmainbranch:
                self.ui.debug('fastannotate: %s: %d new changesets in the main'
                              ' branch\n' % (self.path, len(newmainbranch)))
            elif not hist: # no joints, no updates
                self.ui.debug('fastannotate: %s: linelog cannot help in '
                              'annotating this revision\n' % self.path)

        # prepare annotateresult so we can update linelog incrementally
        self.linelog.annotate(self.linelog.maxrev)

        # 3rd DFS does the actual annotate
        visit = initvisit[:]
        progress = self.ui.makeprogress('building cache',
                                        total=len(newmainbranch))
        while visit:
            f = visit[-1]
            if f in hist:
                visit.pop()
                continue

            ready = True
            pl = pcache[f]
            for p in pl:
                if p not in hist:
                    ready = False
                    visit.append(p)
            if not ready:
                continue

            visit.pop()
            blocks = None # mdiff blocks, used for appending linelog
            ismainbranch = (f in newmainbranch)
            # curr is the same as the traditional annotate algorithm,
            # if we only care about linear history (do not follow merge),
            # then curr is not actually used.
            assert f not in hist
            curr = _decorate(f)
            for i, p in enumerate(pl):
                bs = list(self._diffblocks(hist[p][1], curr[1]))
                if i == 0 and ismainbranch:
                    blocks = bs
                curr = _pair(hist[p], curr, bs)
                if needed[p] == 1:
                    del hist[p]
                    del needed[p]
                else:
                    needed[p] -= 1

            hist[f] = curr
            del pcache[f]

            if ismainbranch: # need to write to linelog
                progress.increment()
                bannotated = None
                if len(pl) == 2 and self.opts.followmerge: # merge
                    bannotated = curr[0]
                if blocks is None: # no parents, add an empty one
                    blocks = list(self._diffblocks('', curr[1]))
                self._appendrev(f, blocks, bannotated)
            elif showpath: # not append linelog, but we need to record path
                self._node2path[f.node()] = f.path()

        progress.complete()

        result = [
            ((self.revmap.rev2hsh(fr) if isinstance(fr, int) else fr.node()), l)
            for fr, l in hist[revfctx][0]] # [(node, linenumber)]
        return self._refineannotateresult(result, revfctx, showpath, showlines)
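
The "needed" dict above is a manual reference counter: each parent's count is the number of children that still have to consume its "hist" entry, and the entry is deleted on the last use. A small self-contained sketch of that pattern (names are illustrative, not fastannotate APIs):

needed = {}
hist = {}

def record_parent(p):
    # 1st DFS: one count per child that will read hist[p]
    needed[p] = needed.get(p, 0) + 1

def consume_parent(p):
    # 3rd DFS: read hist[p], freeing it on the last use
    value = hist[p]
    if needed[p] == 1:
        del hist[p]
        del needed[p]
    else:
        needed[p] -= 1
    return value

record_parent('p1')
record_parent('p1')
hist['p1'] = 'annotate result'
consume_parent('p1')
assert 'p1' in hist      # still needed by one more child
consume_parent('p1')
assert 'p1' not in hist  # freed after the last consumer
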
Example #3
    def apply(self, repo, source, revmap, merges, opts=None):
        '''apply the revisions in revmap one by one in revision order'''
        if opts is None:
            opts = {}
        revs = sorted(revmap)
        p1 = repo.dirstate.p1()
        pulls = []
        diffopts = patch.difffeatureopts(self.ui, opts)
        diffopts.git = True

        lock = tr = None
        try:
            lock = repo.lock()
            tr = repo.transaction('transplant')
            for rev in revs:
                node = revmap[rev]
                revstr = '%d:%s' % (rev, nodemod.short(node))

                if self.applied(repo, node, p1):
                    self.ui.warn(
                        _('skipping already applied revision %s\n') % revstr)
                    continue

                parents = source.changelog.parents(node)
                if not (opts.get('filter') or opts.get('log')):
                    # If the changeset parent is the same as the
                    # wdir's parent, just pull it.
                    if parents[0] == p1:
                        pulls.append(node)
                        p1 = node
                        continue
                    if pulls:
                        if source != repo:
                            exchange.pull(repo, source.peer(), heads=pulls)
                        merge.update(repo,
                                     pulls[-1],
                                     branchmerge=False,
                                     force=False)
                        p1 = repo.dirstate.p1()
                        pulls = []

                domerge = False
                if node in merges:
                    # pulling all the merge revs at once would mean we
                    # couldn't transplant after the latest one even if
                    # earlier transplants fail.
                    domerge = True
                    if not hasnode(repo, node):
                        exchange.pull(repo, source.peer(), heads=[node])

                skipmerge = False
                if parents[1] != revlog.nullid:
                    if not opts.get('parent'):
                        self.ui.note(
                            _('skipping merge changeset %d:%s\n') %
                            (rev, nodemod.short(node)))
                        skipmerge = True
                    else:
                        parent = source.lookup(opts['parent'])
                        if parent not in parents:
                            raise error.Abort(
                                _('%s is not a parent of %s') %
                                (nodemod.short(parent), nodemod.short(node)))
                else:
                    parent = parents[0]

                if skipmerge:
                    patchfile = None
                else:
                    fd, patchfile = pycompat.mkstemp(prefix='hg-transplant-')
                    fp = os.fdopen(fd, r'wb')
                    gen = patch.diff(source, parent, node, opts=diffopts)
                    for chunk in gen:
                        fp.write(chunk)
                    fp.close()

                del revmap[rev]
                if patchfile or domerge:
                    try:
                        try:
                            n = self.applyone(repo,
                                              node,
                                              source.changelog.read(node),
                                              patchfile,
                                              merge=domerge,
                                              log=opts.get('log'),
                                              filter=opts.get('filter'))
                        except TransplantError:
                            # Do not rollback, it is up to the user to
                            # fix the merge or cancel everything
                            tr.close()
                            raise
                        if n and domerge:
                            self.ui.status(
                                _('%s merged at %s\n') %
                                (revstr, nodemod.short(n)))
                        elif n:
                            self.ui.status(
                                _('%s transplanted to %s\n') %
                                (nodemod.short(node), nodemod.short(n)))
                    finally:
                        if patchfile:
                            os.unlink(patchfile)
            tr.close()
            if pulls:
                exchange.pull(repo, source.peer(), heads=pulls)
                merge.update(repo, pulls[-1], branchmerge=False, force=False)
        finally:
            self.saveseries(revmap, merges)
            self.transplants.write()
            if tr:
                tr.release()
            if lock:
                lock.release()
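
The patch handling above follows a common temp-file pattern: mkstemp() creates the file, the diff is written through the returned descriptor, and the finally block unlinks the file even when applying fails. A hedged sketch of just that pattern (with_patchfile() and the chunks iterable are hypothetical; the real code streams patch.diff() output):

import os
import tempfile

def with_patchfile(chunks):
    fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
    try:
        with os.fdopen(fd, 'wb') as fp:
            for chunk in chunks:
                fp.write(chunk)
        # stand-in for applyone(); report the bytes written
        return os.path.getsize(patchfile)
    finally:
        os.unlink(patchfile)

print(with_patchfile([b'diff --git a/f b/f\n']))  # -> 19
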
Example #4
def _convert_rev(ui, meta, svn, r, tbdelta, firstrun):

    editor = meta.editor
    editor.current.clear()
    editor.current.rev = r
    editor.setsvn(svn)

    if firstrun and meta.revmap.firstpulled <= 0:
        # We know nothing about this project, so fetch everything before
        # trying to apply deltas.
        ui.debug('replay: fetching full revision\n')
        svn.get_revision(r.revnum, editor)
    else:
        svn.get_replay(r.revnum, editor, meta.revmap.firstpulled)
    editor.close()

    current = editor.current

    updateexternals(ui, meta, current)

    if current.exception is not None:  # pragma: no cover
        traceback.print_exception(*current.exception)
        raise ReplayException()

    files_to_commit = current.files()
    branch_batches = {}
    rev = current.rev
    date = meta.fixdate(rev.date)

    # build up the branches that have files on them
    failoninvalid = ui.configbool('hgsubversion',
            'failoninvalidreplayfile', False)
    for f in files_to_commit:
        if not meta.is_path_valid(f):
            if failoninvalid:
                raise hgerror.Abort('file %s should not be in commit list' % f)
            continue
        p, b = meta.split_branch_path(f)[:2]
        if b not in branch_batches:
            branch_batches[b] = []
        if p:
            branch_batches[b].append((p, f))

    closebranches = {}
    for branch in tbdelta['branches'][1]:
        branchedits = meta.revmap.branchedits(branch, rev.revnum)
        if len(branchedits) < 1:
            # can't close a branch that never existed
            continue
        ha = branchedits[0][1]
        closebranches[branch] = ha

    extraempty = (set(tbdelta['branches'][0]) -
                  (set(current.emptybranches) | set(branch_batches.keys())))
    current.emptybranches.update([(x, False) for x in extraempty])

    # 1. handle normal commits
    closedrevs = closebranches.values()
    for branch, files in branch_batches.iteritems():

        if branch in current.emptybranches and files:
            del current.emptybranches[branch]

        if meta.skipbranch(branch):
            # make sure we also get rid of it from emptybranches
            if branch in current.emptybranches:
                del current.emptybranches[branch]
            continue

        files = dict(files)
        parents = meta.get_parent_revision(rev.revnum, branch), revlog.nullid
        if parents[0] in closedrevs and branch in meta.closebranches:
            continue

        extra = meta.genextra(rev.revnum, branch)
        tag = None
        if branch is not None:
            # A new regular tag without modifications will be committed by
            # svnmeta.committag(), so we can skip the whole branch for now
            tag = meta.get_path_tag(meta.remotename(branch))
            if (tag and tag not in meta.tags
                and branch not in meta.branches
                and branch not in compathacks.branchset(meta.repo)
                and not files):
                continue

        parentctx = meta.repo[parents[0]]
        if tag:
            if parentctx.node() == node.nullid:
                continue
            extra.update({'branch': parentctx.extra().get('branch', None),
                          'close': 1})

        def filectxfn(repo, memctx, path):
            current_file = files[path]
            try:
                data, isexec, islink, copied = current.pop(current_file)
            except IOError:
                return compathacks.filectxfn_deleted_reraise(memctx)
            if isexec is None or islink is None:
                flags = parentctx.flags(path)
                if isexec is None:
                    isexec = 'x' in flags
                if islink is None:
                    islink = 'l' in flags

            if data is not None:
                if islink:
                    if data.startswith('link '):
                        data = data[len('link '):]
                    else:
                        ui.debug('file marked as link, but may contain data: '
                            '%s\n' % current_file)
            else:
                data = parentctx.filectx(path).data()
            return compathacks.makememfilectx(repo,
                                              memctx=memctx,
                                              path=path,
                                              data=data,
                                              islink=islink,
                                              isexec=isexec,
                                              copied=copied)

        meta.mapbranch(extra)
        if 'branch' not in extra:
            extra['branch'] = 'default'
        current_ctx = context.memctx(meta.repo,
                                     parents,
                                     util.forceutf8(meta.getmessage(rev)),
                                     [util.forceutf8(f) for f in files.keys()],
                                     filectxfn,
                                     util.forceutf8(meta.authors[rev.author]),
                                     date,
                                     extra)

        new_hash = meta.repo.svn_commitctx(current_ctx)
        util.describe_commit(ui, new_hash, branch)
        if (rev.revnum, branch) not in meta.revmap and not tag:
            meta.revmap[rev.revnum, branch] = new_hash
        if tag:
            meta.movetag(tag, new_hash, rev, date)
            meta.addedtags.pop(tag, None)

    # 2. handle branches that need to be committed without any files
    for branch in current.emptybranches:

        if meta.skipbranch(branch):
            continue

        ha = meta.get_parent_revision(rev.revnum, branch)
        if ha == node.nullid:
            continue

        files = []
        def del_all_files(*args):
            raise IOError(errno.ENOENT, 'deleting all files')

        # True here means nuke all files.  This happens when you
        # replace a branch root with an empty directory
        if current.emptybranches[branch]:
            files = meta.repo[ha].files()

        extra = meta.genextra(rev.revnum, branch)
        meta.mapbranch(extra)

        current_ctx = context.memctx(meta.repo,
                                     (ha, node.nullid),
                                     util.forceutf8(meta.getmessage(rev)),
                                     [util.forceutf8(f) for f in files],
                                     del_all_files,
                                     util.forceutf8(meta.authors[rev.author]),
                                     date,
                                     extra)
        new_hash = meta.repo.svn_commitctx(current_ctx)
        util.describe_commit(ui, new_hash, branch)
        if (rev.revnum, branch) not in meta.revmap:
            meta.revmap[rev.revnum, branch] = new_hash

    return closebranches
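
The "build up the branches" loop above groups file paths by branch before committing each batch. A compact sketch of the same grouping with collections.defaultdict (split_branch_path() here is a naive stand-in for meta.split_branch_path()):

import collections

def split_branch_path(f):
    branch, _, path = f.partition('/')
    return path, branch

branch_batches = collections.defaultdict(list)
for f in ['trunk/a.txt', 'trunk/c.txt', 'branches/dev-b.txt']:
    p, b = split_branch_path(f)
    if p:
        branch_batches[b].append((p, f))

print(dict(branch_batches))
# {'trunk': [('a.txt', 'trunk/a.txt'), ('c.txt', 'trunk/c.txt')],
#  'branches': [('dev-b.txt', 'branches/dev-b.txt')]}
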
Example #5
def email(ui, repo, *revs, **opts):
    '''send changesets by email

    By default, diffs are sent in the format generated by
    :hg:`export`, one per message. The series starts with a "[PATCH 0
    of N]" introduction, which describes the series as a whole.

    Each patch email has a Subject line of "[PATCH M of N] ...", using
    the first line of the changeset description as the subject text.
    The message contains two or three parts. First, the changeset
    description.

    With the -d/--diffstat option, if the diffstat program is
    installed, the result of running diffstat on the patch is inserted.

    Finally, the patch itself, as generated by :hg:`export`.

    With the -d/--diffstat or --confirm options, you will be presented
    with a final summary of all messages and asked for confirmation before
    the messages are sent.

    By default the patch is included as text in the email body for
    easy reviewing. Using the -a/--attach option will instead create
    an attachment for the patch. With -i/--inline an inline attachment
    will be created. You can include a patch both as text in the email
    body and as a regular or an inline attachment by combining the
    -a/--attach or -i/--inline with the --body option.

    With -o/--outgoing, emails will be generated for patches not found
    in the destination repository (or only those which are ancestors
    of the specified revisions, if any are provided).

    With -b/--bundle, changesets are selected as for --outgoing, but a
    single email containing a binary Mercurial bundle as an attachment
    will be sent. Use the ``patchbomb.bundletype`` config option to
    control the bundle type as with :hg:`bundle --type`.

    With -m/--mbox, instead of previewing each patchbomb message in a
    pager or sending the messages directly, it will create a UNIX
    mailbox file with the patch emails. This mailbox file can be
    previewed with any mail user agent which supports UNIX mbox
    files.

    With -n/--test, all steps will run, but mail will not be sent.
    You will be prompted for an email recipient address, a subject and
    an introductory message describing the patches of your patchbomb.
    Then when all is done, patchbomb messages are displayed. If the
    PAGER environment variable is set, your pager will be fired up once
    for each patchbomb message, so you can verify everything is alright.

    In case email sending fails, you will find a backup of your series
    introductory message in ``.hg/last-email.txt``.

    The default behavior of this command can be customized through
    configuration. (See :hg:`help patchbomb` for details)

    Examples::

      hg email -r 3000          # send patch 3000 only
      hg email -r 3000 -r 3001  # send patches 3000 and 3001
      hg email -r 3000:3005     # send patches 3000 through 3005
      hg email 3000             # send patch 3000 (deprecated)

      hg email -o               # send all patches not in default
      hg email -o DEST          # send all patches not in DEST
      hg email -o -r 3000       # send all ancestors of 3000 not in default
      hg email -o -r 3000 DEST  # send all ancestors of 3000 not in DEST

      hg email -b               # send bundle of all patches not in default
      hg email -b DEST          # send bundle of all patches not in DEST
      hg email -b -r 3000       # bundle of all ancestors of 3000 not in default
      hg email -b -r 3000 DEST  # bundle of all ancestors of 3000 not in DEST

      hg email -o -m mbox &&    # generate an mbox file...
        mutt -R -f mbox         # ... and view it with mutt
      hg email -o -m mbox &&    # generate an mbox file ...
        formail -s sendmail \\   # ... and use formail to send from the mbox
          -bm -t < mbox         # ... using sendmail

    Before using this command, you will need to enable email in your
    hgrc. See the [email] section in hgrc(5) for details.
    '''

    _charsets = mail._charsets(ui)

    bundle = opts.get('bundle')
    date = opts.get('date')
    mbox = opts.get('mbox')
    outgoing = opts.get('outgoing')
    rev = opts.get('rev')
    # internal option used by pbranches
    patches = opts.get('patches')

    if not (opts.get('test') or mbox):
        # really sending
        mail.validateconfig(ui)

    if not (revs or rev or outgoing or bundle or patches):
        raise error.Abort(_('specify at least one changeset with -r or -o'))

    if outgoing and bundle:
        raise error.Abort(_("--outgoing mode always on with --bundle;"
                           " do not re-specify --outgoing"))

    if outgoing or bundle:
        if len(revs) > 1:
            raise error.Abort(_("too many destinations"))
        if revs:
            dest = revs[0]
        else:
            dest = None
        revs = []

    if rev:
        if revs:
            raise error.Abort(_('use only one form to specify the revision'))
        revs = rev

    revs = scmutil.revrange(repo, revs)
    if outgoing:
        revs = _getoutgoing(repo, dest, revs)
    if bundle:
        opts['revs'] = [str(r) for r in revs]

    # check if revisions exist on the public destination
    publicurl = repo.ui.config('patchbomb', 'publicurl')
    if publicurl is not None:
        repo.ui.debug('checking that revisions exist in the public repo\n')
        try:
            publicpeer = hg.peer(repo, {}, publicurl)
        except error.RepoError:
            repo.ui.write_err(_('unable to access public repo: %s\n')
                              % publicurl)
            raise
        if not publicpeer.capable('known'):
            repo.ui.debug('skipping existence checks: public repo too old\n')
        else:
            out = [repo[r] for r in revs]
            known = publicpeer.known(h.node() for h in out)
            missing = []
            for idx, h in enumerate(out):
                if not known[idx]:
                    missing.append(h)
            if missing:
                if 1 < len(missing):
                    msg = _('public "%s" is missing %s and %i others')
                    msg %= (publicurl, missing[0], len(missing) - 1)
                else:
                    msg = _('public url %s is missing %s')
                    msg %= (publicurl, missing[0])
                revhint = ''.join('-r %s' % h
                                  for h in repo.set('heads(%ld)', missing))
                hint = _('use "hg push %s %s"') % (publicurl, revhint)
                raise error.Abort(msg, hint=hint)

    # start
    if date:
        start_time = util.parsedate(date)
    else:
        start_time = util.makedate()

    def genmsgid(id):
        return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())

    # deprecated config: patchbomb.from
    sender = (opts.get('from') or ui.config('email', 'from') or
              ui.config('patchbomb', 'from') or
              prompt(ui, 'From', ui.username()))

    if patches:
        msgs = _getpatchmsgs(repo, sender, patches, opts.get('patchnames'),
                             **opts)
    elif bundle:
        bundledata = _getbundle(repo, dest, **opts)
        bundleopts = opts.copy()
        bundleopts.pop('bundle', None)  # already processed
        msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
    else:
        _patches = list(_getpatches(repo, revs, **opts))
        msgs = _getpatchmsgs(repo, sender, _patches, **opts)

    showaddrs = []

    def getaddrs(header, ask=False, default=None):
        configkey = header.lower()
        opt = header.replace('-', '_').lower()
        addrs = opts.get(opt)
        if addrs:
            showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
            return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))

        # not on the command line: fallback to config and then maybe ask
        addr = (ui.config('email', configkey) or
                ui.config('patchbomb', configkey) or
                '')
        if not addr and ask:
            addr = prompt(ui, header, default=default)
        if addr:
            showaddrs.append('%s: %s' % (header, addr))
            return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
        else:
            return default

    to = getaddrs('To', ask=True)
    if not to:
        # we can get here in non-interactive mode
        raise error.Abort(_('no recipient addresses provided'))
    cc = getaddrs('Cc', ask=True, default='') or []
    bcc = getaddrs('Bcc') or []
    replyto = getaddrs('Reply-To')

    confirm = ui.configbool('patchbomb', 'confirm')
    confirm |= bool(opts.get('diffstat') or opts.get('confirm'))

    if confirm:
        ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
        ui.write(('From: %s\n' % sender), label='patchbomb.from')
        for addr in showaddrs:
            ui.write('%s\n' % addr, label='patchbomb.to')
        for m, subj, ds in msgs:
            ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
            if ds:
                ui.write(ds, label='patchbomb.diffstats')
        ui.write('\n')
        if ui.promptchoice(_('are you sure you want to send (yn)?'
                             '$$ &Yes $$ &No')):
            raise error.Abort(_('patchbomb canceled'))

    ui.write('\n')

    parent = opts.get('in_reply_to') or None
    # angle brackets may be omitted, they're not semantically part of the msg-id
    if parent is not None:
        if not parent.startswith('<'):
            parent = '<' + parent
        if not parent.endswith('>'):
            parent += '>'

    sender_addr = emailmod.Utils.parseaddr(sender)[1]
    sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
    sendmail = None
    firstpatch = None
    for i, (m, subj, ds) in enumerate(msgs):
        try:
            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
            if not firstpatch:
                firstpatch = m['Message-Id']
            m['X-Mercurial-Series-Id'] = firstpatch
        except TypeError:
            m['Message-Id'] = genmsgid('patchbomb')
        if parent:
            m['In-Reply-To'] = parent
            m['References'] = parent
        if not parent or 'X-Mercurial-Node' not in m:
            parent = m['Message-Id']

        m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
        m['Date'] = emailmod.Utils.formatdate(start_time[0], localtime=True)

        start_time = (start_time[0] + 1, start_time[1])
        m['From'] = sender
        m['To'] = ', '.join(to)
        if cc:
            m['Cc']  = ', '.join(cc)
        if bcc:
            m['Bcc'] = ', '.join(bcc)
        if replyto:
            m['Reply-To'] = ', '.join(replyto)
        if opts.get('test'):
            ui.status(_('displaying '), subj, ' ...\n')
            ui.flush()
            if 'PAGER' in os.environ and not ui.plain():
                fp = util.popen(os.environ['PAGER'], 'w')
            else:
                fp = ui
            generator = emailmod.Generator.Generator(fp, mangle_from_=False)
            try:
                generator.flatten(m, 0)
                fp.write('\n')
            except IOError as inst:
                if inst.errno != errno.EPIPE:
                    raise
            if fp is not ui:
                fp.close()
        else:
            if not sendmail:
                verifycert = ui.config('smtp', 'verifycert', 'strict')
                if opts.get('insecure'):
                    ui.setconfig('smtp', 'verifycert', 'loose', 'patchbomb')
                try:
                    sendmail = mail.connect(ui, mbox=mbox)
                finally:
                    ui.setconfig('smtp', 'verifycert', verifycert, 'patchbomb')
            ui.status(_('sending '), subj, ' ...\n')
            ui.progress(_('sending'), i, item=subj, total=len(msgs))
            if not mbox:
                # Exim does not remove the Bcc field
                del m['Bcc']
            fp = cStringIO.StringIO()
            generator = emailmod.Generator.Generator(fp, mangle_from_=False)
            generator.flatten(m, 0)
            sendmail(sender_addr, to + bcc + cc, fp.getvalue())

    ui.progress(_('writing'), None)
    ui.progress(_('sending'), None)
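
The Message-Id bookkeeping above is what threads the series in a mail client: every message gets its own Message-Id, and later patches carry In-Reply-To/References pointing at the introduction (or the first patch). A minimal sketch with the stdlib email API, using made-up message ids:

from email.message import EmailMessage

msgids = ['<patch1@example>', '<patch2@example>', '<patch3@example>']
parent = None
msgs = []
for mid in msgids:
    m = EmailMessage()
    m['Message-Id'] = mid
    if parent:
        m['In-Reply-To'] = parent
        m['References'] = parent
    else:
        parent = mid  # later mails reply to the first message
    msgs.append(m)

print(msgs[2]['In-Reply-To'])  # -> <patch1@example>
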
Example #6
def debugindex(orig, ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    if (opts.get('changelog') or opts.get('manifest') or opts.get('dir')
            or not shallowutil.isenabled(repo)
            or not repo.shallowmatch(file_)):
        return orig(ui, repo, file_, **opts)

    r = buildtemprevlog(repo, file_)

    # debugindex like normal
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = b' delta'
    else:
        basehdr = b'  base'

    if format == 0:
        ui.write((b"   rev    offset  length " + basehdr + b" linkrev"
                  b" nodeid       p1           p2\n"))
    elif format == 1:
        ui.write((b"   rev flag   offset   length"
                  b"     size " + basehdr + b"   link     p1     p2"
                  b"       nodeid\n"))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write(b"% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i,
                r.start(i),
                r.length(i),
                base,
                r.linkrev(i),
                short(node),
                short(pp[0]),
                short(pp[1]),
            ))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write(b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i,
                r.flags(i),
                r.start(i),
                r.length(i),
                r.rawsize(i),
                base,
                r.linkrev(i),
                pr[0],
                pr[1],
                short(node),
            ))
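
The b"% 6d"-style conversions above produce the fixed-width columns of the index dump; the space flag plus a width right-aligns each number. A tiny illustration of the format-0 row layout, with made-up values:

row = b"% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
    0, 0, 11, 0, 0, b'1c9ecc1c', b'00000000', b'00000000')
print(row.decode(), end='')
# prints one right-aligned row matching the format-0 header above
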
Example #7
    def getchanges(self, version, full):
        if full:
            raise error.Abort(_("convert from git does not support --full"))
        self.modecache = {}
        cmd = ['diff-tree', '-z', '--root', '-m', '-r'
               ] + self.simopt + [version]
        output, status = self.gitrun(*cmd)
        if status:
            raise error.Abort(_('cannot read changes in %s') % version)
        changes = []
        copies = {}
        seen = set()
        entry = None
        subexists = [False]
        subdeleted = [False]
        difftree = output.split('\x00')
        lcount = len(difftree)
        i = 0

        skipsubmodules = self.ui.configbool('convert', 'git.skipsubmodules',
                                            False)

        def add(entry, f, isdest):
            seen.add(f)
            h = entry[3]
            p = (entry[1] == "100755")
            s = (entry[1] == "120000")
            renamesource = (not isdest and entry[4][0] == 'R')

            if f == '.gitmodules':
                if skipsubmodules:
                    return

                subexists[0] = True
                if entry[4] == 'D' or renamesource:
                    subdeleted[0] = True
                    changes.append(('.hgsub', nodemod.nullhex))
                else:
                    changes.append(('.hgsub', ''))
            elif entry[1] == '160000' or entry[0] == ':160000':
                if not skipsubmodules:
                    subexists[0] = True
            else:
                if renamesource:
                    h = nodemod.nullhex
                self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
                changes.append((f, h))

        while i < lcount:
            l = difftree[i]
            i += 1
            if not entry:
                if not l.startswith(':'):
                    continue
                entry = l.split()
                continue
            f = l
            if entry[4][0] == 'C':
                copysrc = f
                copydest = difftree[i]
                i += 1
                f = copydest
                copies[copydest] = copysrc
            if f not in seen:
                add(entry, f, False)
            # A file can be copied multiple times, or modified and copied
            # simultaneously. So f can be repeated even if fdest isn't.
            if entry[4][0] == 'R':
                # rename: next line is the destination
                fdest = difftree[i]
                i += 1
                if fdest not in seen:
                    add(entry, fdest, True)
                    # .gitmodules isn't imported at all, so it being copied to
                    # and fro doesn't really make sense
                    if f != '.gitmodules' and fdest != '.gitmodules':
                        copies[fdest] = f
            entry = None

        if subexists[0]:
            if subdeleted[0]:
                changes.append(('.hgsubstate', nodemod.nullhex))
            else:
                self.retrievegitmodules(version)
                changes.append(('.hgsubstate', ''))
        return (changes, copies, set())
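
getchanges() above walks the NUL-separated output of git diff-tree -z: metadata entries start with ':', each is followed by a file name, and rename/copy entries are followed by a second (destination) name. A self-contained sketch of that state machine over hand-made sample output:

output = (':100644 100644 abc123 def456 M\x00a.txt\x00'
          ':100644 100644 abc123 def456 R100\x00old.txt\x00new.txt\x00')

difftree = output.split('\x00')
i, entry, changes = 0, None, []
while i < len(difftree):
    l = difftree[i]
    i += 1
    if not entry:
        if l.startswith(':'):
            entry = l.split()
        continue
    f = l
    if entry[4][0] == 'R':  # rename: the next token is the destination
        f = (f, difftree[i])
        i += 1
    changes.append((entry[4], f))
    entry = None

print(changes)  # [('M', 'a.txt'), ('R100', ('old.txt', 'new.txt'))]
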
Example #8
def debugdrawdag(ui, repo, **opts):
    """read an ASCII graph from stdin and create changesets

    The ASCII graph is like what :hg:`log -G` outputs, with each `o` replaced
    by the name of the node. The command will create dummy changesets and
    local tags with those names to make the dummy changesets easier to refer
    to.

    If the name of a node is the single character 'o', it will be replaced by
    the word to the right. This makes it easier to reuse
    :hg:`log -G -T '{desc}'` outputs.

    For root nodes (those with no parents), a revset can be used to query the
    existing repo. Note that the revset cannot contain characters that could
    be read as part of the graph edges, like `|/+-\`.
    """
    text = ui.fin.read()

    # parse the graph and make sure len(parents) <= 2 for each node
    edges = _parseasciigraph(text)
    for k, v in edges.items():
        if len(v) > 2:
            raise error.Abort(
                _('%s: too many parents: %s') % (k, b' '.join(v)))

    # parse comments to get extra file content instructions
    files = collections.defaultdict(dict)  # {(name, path): content}
    comments = list(_getcomments(text))
    filere = re.compile(br'^(\w+)/([\w/]+)\s*=\s*(.*)$', re.M)
    for name, path, content in filere.findall(b'\n'.join(comments)):
        content = content.replace(br'\n', b'\n').replace(br'\1', b'\1')
        files[name][path] = content

    committed = {None: node.nullid}  # {name: node}

    # for leaf nodes, try to find existing nodes in repo
    for name, parents in edges.items():
        if len(parents) == 0:
            try:
                committed[name] = scmutil.revsingle(repo, name)
            except error.RepoLookupError:
                pass

    # commit in topological order
    for name, parents in _walkgraph(edges):
        if name in committed:
            continue
        pctxs = [repo[committed[n]] for n in parents]
        pctxs.sort(key=lambda c: c.node())
        added = {}
        if len(parents) > 1:
            # If it's a merge, take the files and contents from the parents
            for f in pctxs[1].manifest():
                if f not in pctxs[0].manifest():
                    added[f] = pctxs[1][f].data()
        else:
            # If it's not a merge, add a single file
            added[name] = name
        # add extra file contents in comments
        for path, content in files.get(name, {}).items():
            added[path] = content
        ctx = simplecommitctx(repo, name, pctxs, added)
        n = ctx.commit()
        committed[name] = n
        tagsmod.tag(repo, [name],
                    n,
                    message=None,
                    user=None,
                    date=None,
                    local=True)

    # handle special comments
    with repo.wlock(), repo.lock(), repo.transaction(b'drawdag'):
        getctx = lambda x: repo.unfiltered()[committed[x.strip()]]
        for comment in comments:
            rels = []  # obsolete relationships
            args = comment.split(b':', 1)
            if len(args) <= 1:
                continue

            cmd = args[0].strip()
            arg = args[1].strip()

            if cmd in (b'replace', b'rebase', b'amend'):
                nodes = [getctx(m) for m in arg.split(b'->')]
                for i in range(len(nodes) - 1):
                    rels.append((nodes[i], (nodes[i + 1], )))
            elif cmd in (b'split', ):
                pre, succs = arg.split(b'->')
                succs = succs.split(b',')
                rels.append((getctx(pre), (getctx(s) for s in succs)))
            elif cmd in (b'prune', ):
                for n in arg.split(b','):
                    rels.append((getctx(n), ()))
            if rels:
                obsolete.createmarkers(repo, rels, date=(0, 0), operation=cmd)
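
The commit loop above relies on _walkgraph() yielding names in topological order, so every parent is committed before its children. A minimal standalone sketch of such a walk (assumes an acyclic graph, as an ASCII DAG implies; not the drawdag implementation itself):

def walkgraph(edges):
    # edges: {name: [parent names]}
    remaining = set(edges)
    while remaining:
        for name in sorted(remaining):
            # emit a node only once none of its parents remain
            if all(p not in remaining for p in edges[name]):
                remaining.remove(name)
                yield name, edges[name]
                break

edges = {'A': [], 'B': ['A'], 'C': ['A'], 'D': ['B', 'C']}
print(list(walkgraph(edges)))
# [('A', []), ('B', ['A']), ('C', ['A']), ('D', ['B', 'C'])]
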
Example #9
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):

    ensureenabled(ui)

    if hooktype not in ['pretxnchangegroup', 'pretxncommit']:
        raise error.Abort(
            _('config error - hook type "%s" cannot stop '
              'incoming changesets nor commits') % hooktype)
    if (hooktype == 'pretxnchangegroup'
            and source not in ui.configlist('acl', 'sources')):
        ui.debug('acl: changes have source "%s" - skipping\n' % source)
        return

    user = None
    if source == 'serve' and 'url' in kwargs:
        url = kwargs['url'].split(':')
        if url[0] == 'remote' and url[1].startswith('http'):
            user = urlreq.unquote(url[3])

    if user is None:
        user = getpass.getuser()

    ui.debug('acl: checking access for user "%s"\n' % user)

    # deprecated config: acl.config
    cfg = ui.config('acl', 'config')
    if cfg:
        ui.readconfig(cfg,
                      sections=[
                          'acl.groups', 'acl.allow.branches',
                          'acl.deny.branches', 'acl.allow', 'acl.deny'
                      ])

    allowbranches = buildmatch(ui, None, user, 'acl.allow.branches')
    denybranches = buildmatch(ui, None, user, 'acl.deny.branches')
    allow = buildmatch(ui, repo, user, 'acl.allow')
    deny = buildmatch(ui, repo, user, 'acl.deny')

    for rev in xrange(repo[node], len(repo)):
        ctx = repo[rev]
        branch = ctx.branch()
        if denybranches and denybranches(branch):
            raise error.Abort(
                _('acl: user "%s" denied on branch "%s"'
                  ' (changeset "%s")') % (user, branch, ctx))
        if allowbranches and not allowbranches(branch):
            raise error.Abort(
                _('acl: user "%s" not allowed on branch "%s"'
                  ' (changeset "%s")') % (user, branch, ctx))
        ui.debug('acl: branch access granted: "%s" on branch "%s"\n' %
                 (ctx, branch))

        for f in ctx.files():
            if deny and deny(f):
                raise error.Abort(
                    _('acl: user "%s" denied on "%s"'
                      ' (changeset "%s")') % (user, f, ctx))
            if allow and not allow(f):
                raise error.Abort(
                    _('acl: user "%s" not allowed on "%s"'
                      ' (changeset "%s")') % (user, f, ctx))
        ui.debug('acl: path access granted: "%s"\n' % ctx)
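
The per-file checks above implement a deny-then-allow policy: a matching deny pattern always rejects, and once an allow list is configured, a file must match it to pass. A condensed sketch with plain predicates standing in for buildmatch():

def checkfile(user, f, allow=None, deny=None):
    if deny and deny(f):
        raise PermissionError('acl: user "%s" denied on "%s"' % (user, f))
    if allow and not allow(f):
        raise PermissionError('acl: user "%s" not allowed on "%s"' % (user, f))

allow = lambda f: f.startswith('src/')
deny = lambda f: f.endswith('.secret')
checkfile('alice', 'src/main.py', allow, deny)  # passes silently
try:
    checkfile('alice', 'src/key.secret', allow, deny)
except PermissionError as e:
    print(e)  # acl: user "alice" denied on "src/key.secret"
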
Example #10
def _dotransplant(ui, repo, *revs, **opts):
    def incwalk(repo, csets, match=util.always):
        for node in csets:
            if match(node):
                yield node

    def transplantwalk(repo, dest, heads, match=util.always):
        '''Yield all nodes that are ancestors of a head but not ancestors
        of dest.
        If no heads are specified, the heads of repo will be used.'''
        if not heads:
            heads = repo.heads()
        ancestors = []
        ctx = repo[dest]
        for head in heads:
            ancestors.append(ctx.ancestor(repo[head]).node())
        for node in repo.changelog.nodesbetween(ancestors, heads)[0]:
            if match(node):
                yield node

    def checkopts(opts, revs):
        if opts.get('continue'):
            if opts.get('branch') or opts.get('all') or opts.get('merge'):
                raise error.Abort(_('--continue is incompatible with '
                                   '--branch, --all and --merge'))
            return
        if not (opts.get('source') or revs or
                opts.get('merge') or opts.get('branch')):
            raise error.Abort(_('no source URL, branch revision, or revision '
                               'list provided'))
        if opts.get('all'):
            if not opts.get('branch'):
                raise error.Abort(_('--all requires a branch revision'))
            if revs:
                raise error.Abort(_('--all is incompatible with a '
                                   'revision list'))

    checkopts(opts, revs)

    if not opts.get('log'):
        # deprecated config: transplant.log
        opts['log'] = ui.config('transplant', 'log')
    if not opts.get('filter'):
        # deprecated config: transplant.filter
        opts['filter'] = ui.config('transplant', 'filter')

    tp = transplanter(ui, repo, opts)

    p1, p2 = repo.dirstate.parents()
    if len(repo) > 0 and p1 == revlog.nullid:
        raise error.Abort(_('no revision checked out'))
    if opts.get('continue'):
        if not tp.canresume():
            raise error.Abort(_('no transplant to continue'))
    else:
        cmdutil.checkunfinished(repo)
        if p2 != revlog.nullid:
            raise error.Abort(_('outstanding uncommitted merges'))
        m, a, r, d = repo.status()[:4]
        if m or a or r or d:
            raise error.Abort(_('outstanding local changes'))

    sourcerepo = opts.get('source')
    if sourcerepo:
        peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
        heads = list(map(peer.lookup, opts.get('branch', ())))
        target = set(heads)
        for r in revs:
            try:
                target.add(peer.lookup(r))
            except error.RepoError:
                pass
        source, csets, cleanupfn = bundlerepo.getremotechanges(ui, repo, peer,
                                    onlyheads=sorted(target), force=True)
    else:
        source = repo
        heads = list(map(source.lookup, opts.get('branch', ())))
        cleanupfn = None

    try:
        if opts.get('continue'):
            tp.resume(repo, source, opts)
            return

        tf = tp.transplantfilter(repo, source, p1)
        if opts.get('prune'):
            prune = set(source.lookup(r)
                        for r in scmutil.revrange(source, opts.get('prune')))
            matchfn = lambda x: tf(x) and x not in prune
        else:
            matchfn = tf
        merges = list(map(source.lookup, opts.get('merge', ())))
        revmap = {}
        if revs:
            for r in scmutil.revrange(source, revs):
                revmap[int(r)] = source.lookup(r)
        elif opts.get('all') or not merges:
            if source != repo:
                alltransplants = incwalk(source, csets, match=matchfn)
            else:
                alltransplants = transplantwalk(source, p1, heads,
                                                match=matchfn)
            if opts.get('all'):
                revs = alltransplants
            else:
                revs, newmerges = browserevs(ui, source, alltransplants, opts)
                merges.extend(newmerges)
            for r in revs:
                revmap[source.changelog.rev(r)] = r
        for r in merges:
            revmap[source.changelog.rev(r)] = r

        tp.apply(repo, source, revmap, merges, opts)
    finally:
        if cleanupfn:
            cleanupfn()
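
transplantwalk() above selects nodes that are ancestors of some head but not ancestors of dest. A set-based sketch of that selection rule (ancestors() is a hypothetical stand-in for the changelog's nodesbetween() machinery):

def transplantwalk(ancestors, dest, heads):
    exclude = ancestors(dest) | {dest}
    picked = set()
    for head in heads:
        picked |= (ancestors(head) | {head}) - exclude
    return sorted(picked)

# linear history 1 <- 2 <- 3 <- 4, described by its ancestor sets
anc = {1: set(), 2: {1}, 3: {1, 2}, 4: {1, 2, 3}}.get
print(transplantwalk(anc, 2, [4]))  # -> [3, 4]
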
Example #11
def _dooverlay(sourcerepo, sourceurl, sourcerevs, destrepo, destctx, prefix,
               noncontiguous, notify=None):
    """Overlay changesets from one repository into another.

    ``sourcerevs`` (iterable of revs) from ``sourcerepo`` will effectively
    be replayed into ``destrepo`` on top of ``destctx``. File paths will be
    added to the directory ``prefix``.

    ``sourcerevs`` may include revisions that have already been overlayed.
    If so, overlay will resume at the first revision not yet processed.

    ``noncontiguous`` removes the restriction that sourcerevs must be a
    contiguous DAG range.
    """
    assert prefix
    prefix = prefix.rstrip('/') + '/'

    ui = destrepo.ui

    sourcerevs.sort()

    # Source revisions must be a contiguous, single DAG range.
    left = set(sourcerevs)
    left.remove(sourcerevs.last())
    for ctx in sourcerepo[sourcerevs.last()].ancestors():
        if not left:
            break

        try:
            left.remove(ctx.rev())
        except KeyError:
            if not noncontiguous:
                raise error.Abort(
                    _('source revisions must be part of contiguous DAG range'))

    if left:
        raise error.Abort(_('source revisions must be part of same DAG head'))

    sourcerevs = list(sourcerevs)

    sourcecl = sourcerepo.changelog
    allsourcehexes = set(hex(sourcecl.node(rev)) for rev in
                         sourcecl.ancestors([sourcerevs[-1]], inclusive=True))

    # Attempt to find an incoming changeset in dest and prune already processed
    # source revisions.
    lastsourcectx = None
    lastdestctx = None
    for rev in sorted(destrepo.changelog.ancestors([destctx.rev()],
                      inclusive=True), reverse=True):
        ctx = destrepo[rev]
        overlayed = ctx.extra().get(REVISION_KEY)

        # Changesets that weren't imported or that didn't come from the source
        # aren't important to us.
        if not overlayed or overlayed not in allsourcehexes:
            continue

        lastsourcectx = sourcerepo[overlayed]

        # If this imported changeset is in the set scheduled for import,
        # we can prune it and all ancestors from the source set. Since
        # sourcerevs is sorted and is a single DAG head, we can simply find
        # the offset of the first seen rev and assume everything before
        # has been imported.
        try:
            lastdestctx = ctx
            idx = sourcerevs.index(lastsourcectx.rev()) + 1
            ui.write(_('%s already processed as %s; '
                       'skipping %d/%d revisions\n') %
                     (short(lastsourcectx.node()), short(ctx.node()),
                      idx, len(sourcerevs)))
            sourcerevs = sourcerevs[idx:]
            break
        except ValueError:
            # Else the changeset in the destination isn't in the incoming set.
            # This is OK iff the destination changeset is a conversion of
            # the parent of the first incoming changeset.
            # TODO: This assumption doesn't hold with noncontiguous=True
            firstsourcectx = sourcerepo[sourcerevs[0]]
            if firstsourcectx.p1().hex() == overlayed:
                break

            raise error.Abort(_('first source changeset (%s) is not a child '
                                'of last overlayed changeset (%s)') % (
                short(firstsourcectx.node()), short(bin(overlayed))))

    if not sourcerevs:
        ui.write(_('no source revisions left to process\n'))
        return

    # We don't (yet) support overlaying merge commits.
    for rev in sourcerevs:
        ctx = sourcerepo[rev]
        if len(ctx.parents()) > 1:
            raise error.Abort(_('do not support overlaying merges: %s') %
                              short(ctx.node()))

    # If we previously performed an overlay, verify that changeset
    # continuity is uninterrupted.
    #
    # For the default mode of contiguous importing, we verify the last overlayed
    # changeset is the first parent of the first changeset to be overlayed. We
    # also verify that files in the destination match the last overlayed
    # changeset.
    #
    # For non-contiguous operation, we skip the parent check because it doesn't
    # make sense. For file comparisons, we check against the parent of the first
    # incoming changeset rather than the last overlayed changeset.
    #
    # The file content checks ensure that the repositories don't get out of
    # sync: diffs from the source repository must match diffs in the
    # destination repository.
    if lastsourcectx:
        if not noncontiguous:
            if sourcerepo[sourcerevs[0]].p1() != lastsourcectx:
                raise error.Abort(_('parent of initial source changeset does '
                                    'not match last overlayed changeset (%s)') %
                                  short(lastsourcectx.node()))

            comparectx = lastsourcectx
        else:
            comparectx = sourcerepo[sourcerevs[0]].p1()

        _verifymanifestsequal(ui, sourcerepo, comparectx, destrepo, destctx,
                              prefix, lastsourcectx, lastdestctx, notify)

    # All the validation is done. Proceed with the data conversion.
    with destrepo.lock():
        with destrepo.transaction('overlay'):
            progress = ui.makeprogress(_('revisions'), total=len(sourcerevs))
            for i, rev in enumerate(sourcerevs):
                progress.update(i + 1)
                sourcectx = sourcerepo[rev]
                node = _overlayrev(sourcerepo, sourceurl, sourcectx,
                                   destrepo, destctx, prefix)
                summary = sourcectx.description().splitlines()[0]
                ui.write('%s -> %s: %s\n' % (short(sourcectx.node()),
                                             short(node), summary))
                destctx = destrepo[node]

            progress.complete()
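
# A minimal, runnable sketch of the contiguity validation above, with a toy
# rev -> parents dict standing in for the changelog (all revs are made up):
# walk ancestors of the highest requested rev and tick off each requested
# rev; meeting an unrequested rev breaks contiguity unless noncontiguous
# mode is enabled, and leftovers mean a second DAG head.
def check_contiguous(sourcerevs, parents, noncontiguous=False):
    left = set(sourcerevs)
    tip = max(sourcerevs)
    left.remove(tip)
    visit = list(parents[tip])
    while visit and left:
        rev = visit.pop()
        visit.extend(parents[rev])
        if rev in left:
            left.remove(rev)
        elif not noncontiguous:
            raise ValueError('source revisions must be part of '
                             'contiguous DAG range')
    if left:
        raise ValueError('source revisions must be part of same DAG head')

toydag = {1: [], 2: [1], 3: [2], 4: [3], 5: [4]}
check_contiguous([3, 4, 5], toydag)                      # contiguous: fine
check_contiguous([2, 3, 5], toydag, noncontiguous=True)  # gap at 4: allowed
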
Example no. 12
def shelvecmd(ui, repo, *pats, **opts):
    '''save and set aside changes from the working directory

    Shelving takes files that "hg status" reports as not clean, saves
    the modifications to a bundle (a shelved change), and reverts the
    files so that their state in the working directory becomes clean.

    To restore these changes to the working directory, use "hg
    unshelve"; this will work even if you switch to a different
    commit.

    When no files are specified, "hg shelve" saves all not-clean
    files. If specific files or directories are named, only changes to
    those files are shelved.

    In a bare shelve (when no files are specified and the interactive,
    include, and exclude options are not used), shelving remembers whether
    the working directory was on a newly created branch, in other words
    whether the working directory was on a different branch than its first
    parent. In this situation, unshelving restores that branch information
    to the working directory.

    Each shelved change has a name that makes it easier to find later.
    The name of a shelved change defaults to being based on the active
    bookmark, or if there is no active bookmark, the current named
    branch.  To specify a different name, use ``--name``.

    To see a list of existing shelved changes, use the ``--list``
    option. For each shelved change, this will print its name, age,
    and description; use ``--patch`` or ``--stat`` for more details.

    To delete specific shelved changes, use ``--delete``. To delete
    all shelved changes, use ``--cleanup``.
    '''
    allowables = [
        ('addremove', set(['create'])),  # 'create' is pseudo action
        ('unknown', set(['create'])),
        ('cleanup', set(['cleanup'])),
        #       ('date', set(['create'])), # ignored for passing '--date "0 0"' in tests
        ('delete', set(['delete'])),
        ('edit', set(['create'])),
        ('list', set(['list'])),
        ('message', set(['create'])),
        ('name', set(['create'])),
        ('patch', set(['patch', 'list'])),
        ('stat', set(['stat', 'list'])),
    ]

    def checkopt(opt):
        if opts.get(opt):
            for i, allowable in allowables:
                if opts[i] and opt not in allowable:
                    raise error.Abort(
                        _("options '--%s' and '--%s' may not be "
                          "used together") % (opt, i))
            return True

    if checkopt('cleanup'):
        if pats:
            raise error.Abort(_("cannot specify names when using '--cleanup'"))
        return cleanupcmd(ui, repo)
    elif checkopt('delete'):
        return deletecmd(ui, repo, pats)
    elif checkopt('list'):
        return listcmd(ui, repo, pats, opts)
    elif checkopt('patch'):
        return singlepatchcmds(ui, repo, pats, opts, subcommand='patch')
    elif checkopt('stat'):
        return singlepatchcmds(ui, repo, pats, opts, subcommand='stat')
    else:
        return createcmd(ui, repo, pats, opts)
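
# A self-contained sketch of the option-compatibility table used above: each
# entry names the actions an option may accompany, and checkopt rejects any
# other pairing. Toy options only; no Mercurial imports needed.
allowables = [
    ('message', {'create'}),
    ('name', {'create'}),
    ('patch', {'patch', 'list'}),
    ('stat', {'stat', 'list'}),
]

def checkopt(opts, opt):
    """Return True if opt is set; raise if another set option forbids it."""
    if opts.get(opt):
        for other, allowed in allowables:
            if opts.get(other) and opt not in allowed:
                raise ValueError("options '--%s' and '--%s' may not be "
                                 "used together" % (opt, other))
        return True

assert checkopt({'patch': True}, 'patch')
try:
    checkopt({'message': True, 'patch': True}, 'patch')
except ValueError as e:
    print(e)  # --message only accompanies 'create', so --patch conflicts
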
Example no. 13
def _dounshelve(ui, repo, *shelved, **opts):
    abortf = opts.get('abort')
    continuef = opts.get('continue')
    if not abortf and not continuef:
        cmdutil.checkunfinished(repo)

    if abortf or continuef:
        if abortf and continuef:
            raise error.Abort(_('cannot use both abort and continue'))
        if shelved:
            raise error.Abort(
                _('cannot combine abort/continue with '
                  'naming a shelved change'))
        if abortf and opts.get('tool', False):
            ui.warn(_('tool option will be ignored\n'))

        try:
            state = shelvedstate.load(repo)
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            cmdutil.wrongtooltocontinue(repo, _('unshelve'))
        except error.CorruptedState as err:
            ui.debug(str(err) + '\n')
            if continuef:
                msg = _('corrupted shelved state file')
                hint = _('please run hg unshelve --abort to abort unshelve '
                         'operation')
                raise error.Abort(msg, hint=hint)
            elif abortf:
                msg = _('could not read shelved state file, your working copy '
                        'may be in an unexpected state\nplease update to some '
                        'commit\n')
                ui.warn(msg)
                shelvedstate.clear(repo)
            return

        if abortf:
            return unshelveabort(ui, repo, state, opts)
        elif continuef:
            return unshelvecontinue(ui, repo, state, opts)
    elif len(shelved) > 1:
        raise error.Abort(_('can only unshelve one change at a time'))
    elif not shelved:
        shelved = listshelves(repo)
        if not shelved:
            raise error.Abort(_('no shelved changes to apply!'))
        basename = util.split(shelved[0][1])[1]
        ui.status(_("unshelving change '%s'\n") % basename)
    else:
        basename = shelved[0]

    if not shelvedfile(repo, basename, 'patch').exists():
        raise error.Abort(_("shelved change '%s' not found") % basename)

    oldquiet = ui.quiet
    lock = tr = None
    forcemerge = ui.backupconfig('ui', 'forcemerge')
    try:
        ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'unshelve')
        lock = repo.lock()

        tr = repo.transaction('unshelve', report=lambda x: None)
        oldtiprev = len(repo)

        pctx = repo['.']
        tmpwctx = pctx
        # The goal is to have a commit structure like so:
        # ...-> pctx -> tmpwctx -> shelvectx
        # where tmpwctx is an optional commit with the user's pending changes
        # and shelvectx is the unshelved changes. Then we merge it all down
        # to the original pctx.

        # Store pending changes in a commit and remember added in case a shelve
        # contains unknown files that are part of the pending change
        s = repo.status()
        addedbefore = frozenset(s.added)
        if s.modified or s.added or s.removed or s.deleted:
            ui.status(
                _("temporarily committing pending changes "
                  "(restore with 'hg unshelve --abort')\n"))

            def commitfunc(ui, repo, message, match, opts):
                hasmq = util.safehasattr(repo, 'mq')
                if hasmq:
                    saved, repo.mq.checkapplied = repo.mq.checkapplied, False

                backup = repo.ui.backupconfig('phases', 'new-commit')
                try:
                    repo.ui.setconfig('phases', 'new-commit', phases.secret)
                    return repo.commit(message, 'shelve@localhost',
                                       opts.get('date'), match)
                finally:
                    repo.ui.restoreconfig(backup)
                    if hasmq:
                        repo.mq.checkapplied = saved

            tempopts = {}
            tempopts['message'] = "pending changes temporary commit"
            tempopts['date'] = opts.get('date')
            ui.quiet = True
            node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
            tmpwctx = repo[node]

        ui.quiet = True
        shelvedfile(repo, basename, 'hg').applybundle()

        ui.quiet = oldquiet

        shelvectx = repo['tip']

        branchtorestore = ''
        if shelvectx.branch() != shelvectx.p1().branch():
            branchtorestore = shelvectx.branch()

        # If the shelve is not immediately on top of the commit
        # we'll be merging with, rebase it to be on top.
        if tmpwctx.node() != shelvectx.parents()[0].node():
            ui.status(_('rebasing shelved changes\n'))
            try:
                rebase.rebase(
                    ui, repo, **{
                        'rev': [shelvectx.rev()],
                        'dest': str(tmpwctx.rev()),
                        'keep': True,
                        'tool': opts.get('tool', ''),
                    })
            except error.InterventionRequired:
                tr.close()

                stripnodes = [
                    repo.changelog.node(rev)
                    for rev in xrange(oldtiprev, len(repo))
                ]
                shelvedstate.save(repo, basename, pctx, tmpwctx, stripnodes,
                                  branchtorestore)

                util.rename(repo.join('rebasestate'),
                            repo.join('unshelverebasestate'))
                raise error.InterventionRequired(
                    _("unresolved conflicts (see 'hg resolve', then "
                      "'hg unshelve --continue')"))

            # refresh ctx after rebase completes
            shelvectx = repo['tip']

            if shelvectx not in tmpwctx.children():
                # rebase was a no-op, so it produced no child commit
                shelvectx = tmpwctx

        mergefiles(ui, repo, pctx, shelvectx)
        restorebranch(ui, repo, branchtorestore)

        # Forget any files that were unknown before the shelve, unknown before
        # unshelve started, but are now added.
        shelveunknown = shelvectx.extra().get('shelve_unknown')
        if shelveunknown:
            shelveunknown = frozenset(shelveunknown.split('\0'))
            addedafter = frozenset(repo.status().added)
            toforget = (addedafter & shelveunknown) - addedbefore
            repo[None].forget(toforget)

        shelvedstate.clear(repo)

        # The transaction aborting will strip all the commits for us,
        # but it doesn't update the inmemory structures, so addchangegroup
        # hooks still fire and try to operate on the missing commits.
        # Clean up manually to prevent this.
        repo.unfiltered().changelog.strip(oldtiprev, tr)

        unshelvecleanup(ui, repo, basename, opts)

        _aborttransaction(repo)
    finally:
        ui.quiet = oldquiet
        if tr:
            tr.release()
        lockmod.release(lock)
        ui.restoreconfig(forcemerge)
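
# The shelve_unknown bookkeeping above reduces to set arithmetic: forget only
# files that were unknown when shelved, are 'added' now, and were not already
# 'added' before the unshelve began. A tiny sketch with made-up file names:
shelveunknown = frozenset(['scratch.txt', 'notes.md'])   # unknown at shelve
addedbefore = frozenset(['feature.py'])                  # added pre-unshelve
addedafter = frozenset(['feature.py', 'scratch.txt'])    # added post-unshelve

toforget = (addedafter & shelveunknown) - addedbefore
assert toforget == frozenset(['scratch.txt'])
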
Example no. 14
def checkparents(repo, state):
    """check parent while resuming an unshelve"""
    if state.parents != repo.dirstate.parents():
        raise error.Abort(
            _('working directory parents do not match unshelve '
              'state'))
Example no. 15
    def _connect(self):
        root = self.cvsroot
        conntype = None
        user, host = None, None
        cmd = ['cvs', 'server']

        self.ui.status(_("connecting to %s\n") % root)

        if root.startswith(":pserver:"):
            root = root[9:]
            m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
                         root)
            if m:
                conntype = "pserver"
                user, passw, serv, port, root = m.groups()
                if not user:
                    user = "******"
                if not port:
                    port = 2401
                else:
                    port = int(port)
                format0 = ":pserver:%s@%s:%s" % (user, serv, root)
                format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)

                if not passw:
                    passw = "A"
                    cvspass = os.path.expanduser("~/.cvspass")
                    try:
                        pf = open(cvspass, 'rb')
                        for line in pf.read().splitlines():
                            part1, part2 = line.split(' ', 1)
                            # /1 :pserver:user@example.com:2401/cvsroot/foo
                            # Ah<Z
                            if part1 == '/1':
                                part1, part2 = part2.split(' ', 1)
                                format = format1
                            # :pserver:user@example.com:/cvsroot/foo Ah<Z
                            else:
                                format = format0
                            if part1 == format:
                                passw = part2
                                break
                        pf.close()
                    except IOError as inst:
                        if inst.errno != errno.ENOENT:
                            if not getattr(inst, 'filename', None):
                                inst.filename = cvspass
                            raise

                sck = socket.socket()
                sck.connect((serv, port))
                sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
                                    "END AUTH REQUEST", ""]))
                if sck.recv(128) != "I LOVE YOU\n":
                    raise error.Abort(_("CVS pserver authentication failed"))

                self.writep = self.readp = sck.makefile('r+')

        if not conntype and root.startswith(":local:"):
            conntype = "local"
            root = root[7:]

        if not conntype:
            # :ext:user@host/home/user/path/to/cvsroot
            if root.startswith(":ext:"):
                root = root[5:]
            m = re.match(br'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
            # Do not mistake a Windows path like "c:\foo\bar" for a
            # connection string
            if os.path.isdir(root) or not m:
                conntype = "local"
            else:
                conntype = "rsh"
                user, host, root = m.group(1), m.group(2), m.group(3)

        if conntype != "pserver":
            if conntype == "rsh":
                rsh = encoding.environ.get("CVS_RSH") or "ssh"
                if user:
                    cmd = [rsh, '-l', user, host] + cmd
                else:
                    cmd = [rsh, host] + cmd

            # popen2 does not support argument lists under Windows
            cmd = [procutil.shellquote(arg) for arg in cmd]
            cmd = procutil.quotecommand(' '.join(cmd))
            self.writep, self.readp = procutil.popen2(cmd)

        self.realroot = root

        self.writep.write("Root %s\n" % root)
        self.writep.write("Valid-responses ok error Valid-requests Mode"
                          " M Mbinary E Checked-in Created Updated"
                          " Merged Removed\n")
        self.writep.write("valid-requests\n")
        self.writep.flush()
        r = self.readp.readline()
        if not r.startswith("Valid-requests"):
            raise error.Abort(_('unexpected response from CVS server '
                               '(expected "Valid-requests", but got %r)')
                             % r)
        if "UseUnchanged" in r:
            self.writep.write("UseUnchanged\n")
            self.writep.flush()
            r = self.readp.readline()
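
# A self-contained sketch of the .cvspass lookup above: each stored root is
# compared against two candidate spellings, format0 without the port and
# format1 with it (a "/1 " prefix marks the newer file format). The entry
# and scrambled password below are made up.
def lookup_password(lines, format0, format1):
    for line in lines:
        part1, part2 = line.split(' ', 1)
        if part1 == '/1':            # new format: "/1 <root> <scrambled>"
            part1, part2 = part2.split(' ', 1)
            wanted = format1
        else:                        # old format: "<root> <scrambled>"
            wanted = format0
        if part1 == wanted:
            return part2             # scrambled password
    return 'A'                       # CVS convention for an empty password

cvspass = ['/1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z']
print(lookup_password(cvspass,
                      ':pserver:user@example.com:/cvsroot/foo',
                      ':pserver:user@example.com:2401/cvsroot/foo'))  # Ah<Z
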
Example no. 16
def fastannotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for each line.

    This command is useful for discovering when a change was made and by whom.

    By default this command prints revision numbers. If you include --file,
    --user, or --date, the revision number is suppressed unless you also
    include --number. The default format can also be customized by setting
    fastannotate.defaultformat.

    Returns 0 on success.

    .. container:: verbose

        This command uses an implementation different from the vanilla annotate
        command, which may produce slightly different (while still reasonable)
        outputs for some cases.

        Unlike the vanilla annotate, fastannotate follows renames regardless
        of the existence of --file.

        For the best performance when running on a full repo, use -c and -l
        and avoid -u, -d, and -n. Use --linear and --no-content to make it
        even faster.

        For the best performance when running on a shallow (remotefilelog)
        repo, avoid --linear, --no-follow, and any diff options, as the
        server won't be able to populate the annotate cache when non-default
        options affecting results are used.
    """
    if not pats:
        raise error.Abort(_('at least one filename or pattern is required'))

    # performance hack: filtered repo can be slow. unfilter by default.
    if ui.configbool('fastannotate', 'unfilteredrepo'):
        repo = repo.unfiltered()

    opts = pycompat.byteskwargs(opts)

    rev = opts.get('rev', '.')
    rebuild = opts.get('rebuild', False)

    diffopts = patch.difffeatureopts(ui,
                                     opts,
                                     section='annotate',
                                     whitespace=True)
    aopts = facontext.annotateopts(
        diffopts=diffopts,
        followmerge=not opts.get('linear', False),
        followrename=not opts.get('no_follow', False))

    if not any(
            opts.get(s)
            for s in ['user', 'date', 'file', 'number', 'changeset']):
        # default 'number' for compatibility. but fastannotate is more
        # efficient with "changeset", "line-number" and "no-content".
        for name in ui.configlist('fastannotate', 'defaultformat', ['number']):
            opts[name] = True

    ui.pager('fastannotate')
    template = opts.get('template')
    if template == 'json':
        formatter = faformatter.jsonformatter(ui, repo, opts)
    else:
        formatter = faformatter.defaultformatter(ui, repo, opts)
    showdeleted = opts.get('deleted', False)
    showlines = not bool(opts.get('no_content'))
    showpath = opts.get('file', False)

    # find the head of the main (master) branch
    master = ui.config('fastannotate', 'mainbranch') or rev

    # paths will be used for prefetching and the real annotating
    paths = list(_matchpaths(repo, rev, pats, opts, aopts))

    # for client, prefetch from the server
    if util.safehasattr(repo, 'prefetchfastannotate'):
        repo.prefetchfastannotate(paths)

    for path in paths:
        result = lines = existinglines = None
        while True:
            try:
                with facontext.annotatecontext(repo, path, aopts,
                                               rebuild) as a:
                    result = a.annotate(rev,
                                        master=master,
                                        showpath=showpath,
                                        showlines=(showlines
                                                   and not showdeleted))
                    if showdeleted:
                        existinglines = set((l[0], l[1]) for l in result)
                        result = a.annotatealllines(rev,
                                                    showpath=showpath,
                                                    showlines=showlines)
                break
            except (faerror.CannotReuseError, faerror.CorruptedFileError):
                # happens if master moves backwards, or the file was deleted
                # and readded, or renamed to an existing name, or corrupted.
                if rebuild:  # give up since we have tried rebuild already
                    raise
                else:  # try a second time rebuilding the cache (slow)
                    rebuild = True
                    continue

        if showlines:
            result, lines = result

        formatter.write(result, lines, existinglines=existinglines)
    formatter.end()
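
# The annotate loop above retries exactly once: on a cache error it flips
# rebuild and goes around again, and a second failure propagates. The retry
# skeleton in isolation (the exception class and annotate callable are
# stand-ins, not fastannotate APIs):
class CacheError(Exception):
    pass

def annotate_with_retry(annotate, rebuild=False):
    while True:
        try:
            return annotate(rebuild)
        except CacheError:
            if rebuild:      # already tried rebuilding; give up
                raise
            rebuild = True   # retry once, rebuilding the cache (slow)

calls = []
def flaky(rebuild):
    calls.append(rebuild)
    if not rebuild:
        raise CacheError('master moved backwards')
    return 'result'

assert annotate_with_retry(flaky) == 'result'
assert calls == [False, True]
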
Example no. 17
    def getchanges(self, rev, full):
        if full:
            raise error.Abort(_("convert from cvs does not support --full"))
        self._parse()
        return sorted(self.files[rev].iteritems()), {}, set()
Example no. 18
def bad(x, y):
    raise error.Abort("%s: %s" % (x, y))
Example no. 19
def push(repo, store, what, repo_heads, repo_branches, dry_run=False):
    def heads():
        for sha1 in store.heads(repo_branches):
            yield '^%s' % store.changeset_ref(sha1)

    def local_bases():
        h = chain(heads(), (w for w in what if w))
        for c, t, p in GitHgHelper.rev_list('--topo-order', '--full-history',
                                            '--boundary', *h):
            if c[0] != '-':
                continue
            yield store.hg_changeset(c[1:])

        for w in what:
            rev = store.hg_changeset(w)
            if rev:
                yield rev

    common = findcommon(repo, store, set(local_bases()))
    logging.info('common: %s', common)

    def revs():
        for sha1 in common:
            yield '^%s' % store.changeset_ref(sha1)

    revs = chain(revs(), (w for w in what if w))
    push_commits = list((c, p) for c, t, p in GitHgHelper.rev_list(
        '--topo-order', '--full-history', '--parents', '--reverse', *revs))

    pushed = False
    if push_commits:
        has_root = any(not p for c, p in push_commits)
        force = all(v[1] for v in what.values())
        if has_root and repo_heads:
            if not force:
                raise Exception('Cannot push a new root')
            else:
                logging.warn('Pushing a new root')
        if force:
            repo_heads = ['force']
        else:
            if not repo_heads:
                repo_heads = [NULL_NODE_ID]
            repo_heads = [unhexlify(h) for h in repo_heads]
    if push_commits and not dry_run:
        if repo.local():
            repo.local().ui.setconfig('server', 'validate', True)
        b2caps = bundle2caps(repo) if unbundle20 else {}
        logging.getLogger('bundle2').debug('%r', b2caps)
        if b2caps:
            b2caps['replycaps'] = encodecaps({'error': ['abort']})
        cg = create_bundle(store, push_commits, b2caps)
        if not isinstance(repo, HelperRepo):
            cg = util.chunkbuffer(cg)
            if not b2caps:
                cg = cg1unpacker(cg, 'UN')
        reply = repo.unbundle(cg, repo_heads, '')
        if unbundle20 and isinstance(reply, unbundle20):
            parts = iter(reply.iterparts())
            for part in parts:
                logging.getLogger('bundle2').debug('part: %s', part.type)
                logging.getLogger('bundle2').debug('params: %r', part.params)
                if part.type == 'output':
                    sys.stderr.write(part.read())
                elif part.type == 'reply:changegroup':
                    # TODO: should check params['in-reply-to']
                    reply = int(part.params['return'])
                elif part.type == 'error:abort':
                    raise error.Abort(part.params['message'],
                                      hint=part.params.get('hint'))
                else:
                    logging.getLogger('bundle2').warning(
                        'ignoring bundle2 part: %s', part.type)
        pushed = reply != 0
    return gitdag(push_commits) if pushed or dry_run else ()
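
# The '^sha1' strings fed to rev-list above use git's exclusion syntax:
# everything reachable from the requested refs minus everything reachable
# from the common set is what gets pushed. A toy sketch of assembling that
# argument list (fake hashes, plain strings in place of store lookups):
from itertools import chain

common = ['1111111', '2222222']        # changesets both sides already have
what = ['aaaaaaa', None, 'bbbbbbb']    # requested refs (None entries skipped)

def exclusions(shas):
    for sha1 in shas:
        yield '^%s' % sha1             # rev-list exclusion marker

revs = list(chain(exclusions(common), (w for w in what if w)))
assert revs == ['^1111111', '^2222222', 'aaaaaaa', 'bbbbbbb']
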
Example no. 20
def replacechangesets(repo, oldnodes, createfn, backuptopic='replacing'):
    """Replace changesets with new versions.

    This is a generic function used to perform history rewriting.

    Given an iterable of input nodes, a function will be called which is
    expected to produce a new changeset to replace the input node. The
    function signature should be:

        def createfn(repo, ctx, revmap, copyfilectxfn):

    It is passed a repo, the changectx being rewritten, a map of old to new
    revisions that have been changed so far, and a function that can be used
    as the memctx callback for obtaining memfilectx when no file modifications
    are to be performed (a common pattern). The function should return an
    *uncommitted* memctx holding the new changeset info.

    We currently require that the createfn callback return a new changeset
    and that no file changes occur. Restricting file changes satisfies the
    requirements this function was invented for and keeps the implementation
    simple.

    After the memctx is obtained, it is committed. Children changesets are
    rebased automatically after all changesets have been rewritten.

    After the old to new mapping is obtained, bookmarks are moved and old
    changesets are made obsolete or stripped, depending on what is appropriate
    for the repo configuration.

    This function handles locking the repository and performing as many actions
    in a transaction as possible.

    Before any changes are made, we verify the state of the repo is sufficient
    for transformation to occur and abort otherwise.
    """
    if not oldnodes:
        return {}

    repo = repo.unfiltered()

    # Validate function called properly.
    for node in oldnodes:
        if len(node) != 20:
            raise error.Abort('replacechangesets expects 20 byte nodes')

    uoldrevs = [repo[node].rev() for node in oldnodes]
    oldrevs = sorted(uoldrevs)
    if oldrevs != uoldrevs:
        raise error.Abort('must pass oldnodes in changelog order')

    # We may perform stripping, and stripping inside a nested transaction
    # is a recipe for disaster.
    # currenttransaction was added in 3.3. Copy the implementation until we
    # drop 3.2 compatibility.
    if hasattr(repo, 'currenttransaction'):
        intrans = repo.currenttransaction()
    else:
        if repo._transref and repo._transref().running():
            intrans = True
        else:
            intrans = False

    if intrans:
        raise error.Abort('cannot call replacechangesets when a transaction '
                          'is active')

    # The revisions impacted by the current operation. This is essentially
    # all non-hidden children. We don't operate on hidden changesets because
    # there is no point - they are hidden and deemed not important.
    impactedrevs = list(repo.filtered('visible').revs('%ld::', oldrevs))

    # If we'll need to update the working directory, don't do anything if there
    # are uncommitted changes, as this could cause a giant mess (merge
    # conflicts, etc). Note the comparison against impacted revs, as children
    # of rewritten changesets will be rebased below.
    dirstaterev = repo[repo.dirstate.p1()].rev()
    if dirstaterev in impactedrevs:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

    obsenabled = False
    if hasattr(obsolete, 'isenabled'):
        obsenabled = obsolete.isenabled(repo, 'createmarkers')
    else:
        obsenabled = obsolete._enabled

    def adjustphase(repo, tr, phase, node):
        # transaction argument added in Mercurial 3.2.
        try:
            phases.advanceboundary(repo, tr, phase, [node])
            phases.retractboundary(repo, tr, phase, [node])
        except TypeError:
            phases.advanceboundary(repo, phase, [node])
            phases.retractboundary(repo, phase, [node])

    nodemap = {}
    wlock, lock, tr = None, None, None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction('replacechangesets')

        # Create the new changesets.
        revmap = OrderedDict()
        for oldnode in oldnodes:
            oldctx = repo[oldnode]

            # Copy revmap out of paranoia.
            newctx = createfn(repo, oldctx, dict(revmap),
                              preservefilectx(oldctx))

            if not isinstance(newctx, context.memctx):
                raise error.Abort('createfn must return a context.memctx')

            if oldctx == newctx:
                raise error.Abort('createfn must create a new changeset')

            newnode = newctx.commit()
            # Needed so .manifestnode() works, which memctx doesn't have.
            newctx = repo[newnode]

            # This makes the implementation significantly simpler as we don't
            # need to worry about merges when we do auto rebasing later.
            if oldctx.manifestnode() != newctx.manifestnode():
                raise error.Abort(
                    'we do not allow replacements to modify files')

            revmap[oldctx.rev()] = newctx.rev()
            nodemap[oldnode] = newnode

            # Do phase adjustment ourselves because we want callbacks to be as
            # dumb as possible.
            adjustphase(repo, tr, oldctx.phase(), newctx.node())

        # Children of rewritten changesets are impacted as well. Rebase as
        # needed.
        for rev in impactedrevs:
            # It was handled by createfn() or by this loop already.
            if rev in revmap:
                continue

            oldctx = repo[rev]
            if oldctx.p1().rev() not in revmap:
                raise error.Abort('unknown parent of child commit: %s' %
                                  oldctx.hex(),
                                  hint='please report this as a bug')

            parents = newparents(repo, oldctx, revmap)
            mctx = context.memctx(repo,
                                  parents,
                                  oldctx.description(),
                                  oldctx.files(),
                                  preservefilectx(oldctx),
                                  user=oldctx.user(),
                                  date=oldctx.date(),
                                  extra=oldctx.extra())
            status = oldctx.p1().status(oldctx)
            mctx.modified = lambda: status[0]
            mctx.added = lambda: status[1]
            mctx.removed = lambda: status[2]

            newnode = mctx.commit()
            revmap[rev] = repo[newnode].rev()
            nodemap[oldctx.node()] = newnode

            # Retain phase.
            adjustphase(repo, tr, oldctx.phase(), newnode)

        # Move bookmarks to new nodes.
        bmchanges = []
        oldactivebookmark = activebookmark(repo)

        for oldrev, newrev in revmap.items():
            oldnode = repo[oldrev].node()
            for mark, bmnode in repo._bookmarks.items():
                if bmnode == oldnode:
                    bmchanges.append((mark, repo[newrev].node()))

        if bmchanges:
            # TODO unconditionally call applychanges() when support for
            # Mercurial 4.1 is dropped.
            if util.safehasattr(repo._bookmarks, 'applychanges'):
                repo._bookmarks.applychanges(repo, tr, bmchanges)
            else:
                for mark, newnode in bmchanges:
                    repo._bookmarks[mark] = newnode

                repo._bookmarks.recordchange(tr)

        # Update references to rewritten MQ patches.
        if hasattr(repo, 'mq'):
            q = repo.mq
            for e in q.applied:
                if e.node in nodemap:
                    e.node = nodemap[e.node]
                    q.applieddirty = True

            # This no-ops if nothing is dirty.
            q.savedirty()

        # If obsolescence is enabled, obsolete the old changesets.
        if obsenabled:
            markers = []
            for oldrev, newrev in revmap.items():
                if repo[oldrev] != repo[newrev]:
                    markers.append((repo[oldrev], (repo[newrev], )))
            if markers:
                obsolete.createmarkers(repo, markers)

        # Move the working directory to the new node, if applicable.
        wdirrev = repo['.'].rev()
        if wdirrev in revmap:
            hg.updaterepo(repo, repo[revmap[wdirrev]].node(), True)

        # The active bookmark is tracked by its symbolic name, not its
        # changeset. Since we didn't do anything that should change the
        # active bookmark, we shouldn't need to adjust it.
        if activebookmark(repo) != oldactivebookmark:
            raise error.Abort(
                'active bookmark changed; '
                'this should not occur!',
                hint='please file a bug')

        tr.close()

        # Unless obsolescence is enabled, strip any obsolete changesets.
        if not obsenabled:
            stripnodes = []
            for oldrev, newrev in revmap.items():
                if repo[oldrev] != repo[newrev]:
                    stripnodes.append(repo[oldrev].node())
            if stripnodes:
                repair.strip(repo.ui, repo, stripnodes, topic=backuptopic)

    finally:
        if tr:
            tr.release()
        lockmod.release(wlock, lock)

    return nodemap
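
# A hedged sketch of a createfn callback satisfying the contract documented
# above: it rewrites only the commit message, leaving files untouched, so the
# passed-in copyfilectxfn suffices. The reword helper is made up; it assumes
# the same context module used elsewhere in this file.
def reword(newmessage):
    """Return a createfn that replaces each changeset's description."""
    def createfn(repo, ctx, revmap, copyfilectxfn):
        # Return an *uncommitted* memctx; replacechangesets commits it.
        return context.memctx(repo,
                              (ctx.p1().node(), ctx.p2().node()),
                              newmessage,
                              ctx.files(),
                              copyfilectxfn,
                              user=ctx.user(),
                              date=ctx.date(),
                              extra=ctx.extra())
    return createfn

# nodemap = replacechangesets(repo, [oldnode], reword('better message'))
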
Example no. 21
    def numcommits(self):
        output, ret = self.gitrunlines('rev-list', '--all')
        if ret:
            raise error.Abort(_('cannot retrieve number of commits in %s')
                              % self.path)
        return len(output)
Example no. 22
def commands_branch(orig, ui, repo, label=None, **opts):
    if label and not opts.get(r'clean') and not opts.get(r'rev'):
        raise error.Abort(
            _("creating named branches is disabled and you should use "
              "bookmarks"),
            hint="see 'hg help bookflow'")
    return orig(ui, repo, label, **opts)
Example no. 23
    def toposort(self, parents, sortmode):
        '''Return an ordering such that every uncommitted changeset is
        preceded by all its uncommitted ancestors.'''

        def mapchildren(parents):
            """Return a (children, roots) tuple where 'children' maps parent
            revision identifiers to children ones, and 'roots' is the list of
            revisions without parents. 'parents' must be a mapping of revision
            identifier to its parents ones.
            """
            visit = sorted(parents)
            seen = set()
            children = {}
            roots = []

            while visit:
                n = visit.pop(0)
                if n in seen:
                    continue
                seen.add(n)
                # Ensure that nodes without parents are present in the
                # 'children' mapping.
                children.setdefault(n, [])
                hasparent = False
                for p in parents[n]:
                    if p not in self.map:
                        visit.append(p)
                        hasparent = True
                    children.setdefault(p, []).append(n)
                if not hasparent:
                    roots.append(n)

            return children, roots

        # Sort functions are supposed to take a list of revisions which
        # can be converted immediately and pick one

        def makebranchsorter():
            """If the previously converted revision has a child in the
            eligible revisions list, pick it. Return the list head
            otherwise. Branch sort attempts to minimize branch
            switching, which is harmful for Mercurial backend
            compression.
            """
            prev = [None]
            def picknext(nodes):
                next = nodes[0]
                for n in nodes:
                    if prev[0] in parents[n]:
                        next = n
                        break
                prev[0] = next
                return next
            return picknext

        def makesourcesorter():
            """Source specific sort."""
            keyfn = lambda n: self.commitcache[n].sortkey
            def picknext(nodes):
                return sorted(nodes, key=keyfn)[0]
            return picknext

        def makeclosesorter():
            """Close order sort."""
            keyfn = lambda n: ('close' not in self.commitcache[n].extra,
                               self.commitcache[n].sortkey)
            def picknext(nodes):
                return sorted(nodes, key=keyfn)[0]
            return picknext

        def makedatesorter():
            """Sort revisions by date."""
            dates = {}
            def getdate(n):
                if n not in dates:
                    dates[n] = util.parsedate(self.commitcache[n].date)
                return dates[n]

            def picknext(nodes):
                return min([(getdate(n), n) for n in nodes])[1]

            return picknext

        if sortmode == 'branchsort':
            picknext = makebranchsorter()
        elif sortmode == 'datesort':
            picknext = makedatesorter()
        elif sortmode == 'sourcesort':
            picknext = makesourcesorter()
        elif sortmode == 'closesort':
            picknext = makeclosesorter()
        else:
            raise error.Abort(_('unknown sort mode: %s') % sortmode)

        children, actives = mapchildren(parents)

        s = []
        pendings = {}
        while actives:
            n = picknext(actives)
            actives.remove(n)
            s.append(n)

            # Update dependents list
            for c in children.get(n, []):
                if c not in pendings:
                    pendings[c] = [p for p in parents[c] if p not in self.map]
                try:
                    pendings[c].remove(n)
                except ValueError:
                    raise error.Abort(_('cycle detected between %s and %s')
                                       % (recode(c), recode(n)))
                if not pendings[c]:
                    # Parents are converted, node is eligible
                    actives.insert(0, c)
                    pendings[c] = None

        if len(s) != len(parents):
            raise error.Abort(_("not all revisions were sorted"))

        return s
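
# mapchildren plus a picknext policy amounts to a Kahn-style topological
# sort: keep a set of revisions whose parents are all converted and let the
# sort mode choose which to emit next. A self-contained toy run with a
# sourcesort-like policy (plain dicts, no converter state):
toyparents = {'a': [], 'b': ['a'], 'c': ['a'], 'd': ['b', 'c']}

children, roots = {}, []
for n in sorted(toyparents):
    children.setdefault(n, [])
    for p in toyparents[n]:
        children.setdefault(p, []).append(n)
    if not toyparents[n]:
        roots.append(n)

order, pendings, actives = [], {}, list(roots)
while actives:
    n = min(actives)            # sourcesort stand-in: smallest key first
    actives.remove(n)
    order.append(n)
    for c in children.get(n, []):
        pendings.setdefault(c, list(toyparents[c])).remove(n)
        if not pendings[c]:     # all parents emitted; c becomes eligible
            actives.insert(0, c)

assert order == ['a', 'b', 'c', 'd']
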
Example no. 24
File: gpg.py Project: anydeploy/hg
def _dosign(ui, repo, *revs, **opts):
    mygpg = newgpg(ui, **opts)
    sigver = "0"
    sigmessage = ""

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    if revs:
        nodes = [repo.lookup(n) for n in revs]
    else:
        nodes = [
            node for node in repo.dirstate.parents() if node != hgnode.nullid
        ]
        if len(nodes) > 1:
            raise error.Abort(
                _('uncommitted merge - please provide a '
                  'specific revision'))
        if not nodes:
            nodes = [repo.changelog.tip()]

    for n in nodes:
        hexnode = hgnode.hex(n)
        ui.write(
            _("signing %d:%s\n") % (repo.changelog.rev(n), hgnode.short(n)))
        # build data
        data = node2txt(repo, n, sigver)
        sig = mygpg.sign(data)
        if not sig:
            raise error.Abort(_("error while signing"))
        sig = binascii.b2a_base64(sig)
        sig = sig.replace("\n", "")
        sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)

    # write it
    if opts['local']:
        repo.vfs.append("localsigs", sigmessage)
        return

    if not opts["force"]:
        msigs = match.exact(repo.root, '', ['.hgsigs'])
        if any(repo.status(match=msigs, unknown=True, ignored=True)):
            raise error.Abort(_("working copy of .hgsigs is changed "),
                              hint=_("please commit .hgsigs manually"))

    sigsfile = repo.wfile(".hgsigs", "ab")
    sigsfile.write(sigmessage)
    sigsfile.close()

    if '.hgsigs' not in repo.dirstate:
        repo[None].add([".hgsigs"])

    if opts["no_commit"]:
        return

    message = opts['message']
    if not message:
        # we don't translate commit messages
        message = "\n".join([
            "Added signature for changeset %s" % hgnode.short(n) for n in nodes
        ])
    try:
        editor = cmdutil.getcommiteditor(editform='gpg.sign', **opts)
        repo.commit(message,
                    opts['user'],
                    opts['date'],
                    match=msigs,
                    editor=editor)
    except ValueError as inst:
        raise error.Abort(str(inst))
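
# Each signature appended to .hgsigs (or localsigs) above is one line: the
# full hex node, the signature version, and the base64-encoded GPG signature
# with newlines stripped. A sketch with dummy values in place of real GPG
# output:
import binascii

hexnode = 'a' * 40                    # full changeset hash
sigver = '0'                          # the only version node2txt understands
rawsig = binascii.b2a_base64(b'raw gpg bytes').decode().replace('\n', '')

sigline = '%s %s %s\n' % (hexnode, sigver, rawsig)
print(sigline)  # 'aaaa...aaaa 0 cmF3IGdwZyBieXRlcw==\n'
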
Example no. 25
def checkout(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'b', b'branch', b'', b''),
        (b'B', b'branch', b'', b''),
        (b'f', b'force', None, b''),
        (b'p', b'patch', None, b''),
    ]
    paths = []
    if b'--' in args:
        sepindex = args.index(b'--')
        paths.extend(args[sepindex + 1 :])
        args = args[:sepindex]

    args, opts = parseoptions(ui, cmdoptions, args)

    rev = None
    if args and ispath(repo, args[0]):
        paths = args + paths
    elif args:
        rev = args[0]
        paths = args[1:] + paths

    cmd = Command(b'update')

    if opts.get(b'force'):
        if paths or rev:
            cmd[b'-C'] = None

    if opts.get(b'patch'):
        cmd = Command(b'revert')
        cmd[b'-i'] = None

    if opts.get(b'branch'):
        if len(args) == 0:
            cmd = Command(b'bookmark')
            cmd.append(opts.get(b'branch'))
        else:
            cmd.append(args[0])
            bookcmd = Command(b'bookmark')
            bookcmd.append(opts.get(b'branch'))
            cmd = cmd & bookcmd
    # if there is any path argument supplied, use revert instead of update
    elif len(paths) > 0:
        ui.status(_(b"note: use --no-backup to avoid creating .orig files\n\n"))
        cmd = Command(b'revert')
        if opts.get(b'patch'):
            cmd[b'-i'] = None
        if rev:
            cmd[b'-r'] = rev
        cmd.extend(paths)
    elif rev:
        if opts.get(b'patch'):
            cmd[b'-r'] = rev
        else:
            cmd.append(rev)
    elif opts.get(b'force'):
        cmd = Command(b'revert')
        cmd[b'--all'] = None
    else:
        raise error.Abort(_(b"a commit must be specified"))

    ui.status((bytes(cmd)), b"\n")
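
# Tracing the branches above yields translations like these (assuming the
# Command class renders as shown; examples are illustrative, not exhaustive):
#
#   git checkout -b topic          ->  hg bookmark topic
#   git checkout topic             ->  hg update topic
#   git checkout -- foo.py         ->  hg revert foo.py
#   git checkout -p somerev        ->  hg revert -i -r somerev
#   git checkout -f                ->  hg revert --all
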
Example no. 26
File: gpg.py Project: anydeploy/hg
def node2txt(repo, node, ver):
    """map a manifest into some text"""
    if ver == "0":
        return "%s\n" % hgnode.hex(node)
    else:
        raise error.Abort(_("unknown signature version"))
Example no. 27
def review(ui, repo, *paths, **opts):
    '''
      Uploads a review to https://codereview.adblockplus.org/ or updates an
      existing review request. This will always send mails for new reviews;
      when updating a review, mails will only be sent if a message is given.
    '''
    args = ['--oauth2', '--server', SERVER]
    if ui.debugflag:
        args.append('--noisy')
    elif ui.verbose:
        args.append('--verbose')
    elif ui.quiet:
        args.append('--quiet')

    if not opts.get('issue') or opts.get('message'):
        args.append('--send_mail')

    if opts.get('revision') and opts.get('change'):
        raise error.Abort(
            'Ambiguous revision range: only one of --revision and --change can be specified.'
        )
    if opts.get('change'):
        rev = repo[opts['change']]
        args.extend(['--rev', '{}:{}'.format(rev.parents()[0], rev)])
    elif opts.get('revision'):
        args.extend(['--rev', opts['revision']])
    else:
        raise error.Abort(
            'What should be reviewed? Either --revision or --change is required.'
        )

    if not opts.get('issue'):
        # New issue, make sure title and message are set
        if not opts.get('title') and opts.get('change'):
            opts['title'] = repo[opts['change']].description()
        if not opts.get('title'):
            opts['title'] = ui.prompt('New review title: ', '')
        if not opts['title'].strip():
            raise error.Abort('No review title given.')

        if not opts.get('message'):
            opts['message'] = opts['title']

        path = (ui.config('paths', 'default-push')
                or ui.config('paths', 'default') or '')
        match = re.search(r'^(?:https://|ssh://hg@)(.*)', path)
        if match:
            opts['base_url'] = 'https://' + match.group(1)

        # Make sure there is at least one reviewer
        if not opts.get('reviewers'):
            opts['reviewers'] = ui.prompt('Reviewers (comma-separated): ', '')
        if not opts['reviewers'].strip():
            raise error.Abort('No reviewers given.')

    for opt in ('reviewers', 'cc'):
        if opts.get(opt):
            users = [
                u if '@' in u else u + '@adblockplus.org'
                for u in re.split(r'\s*,\s*', opts[opt])
            ]
            opts[opt] = ','.join(users)

    for opt in ('issue', 'title', 'message', 'reviewers', 'cc', 'base_url'):
        if opts.get(opt, ''):
            args.extend(['--' + opt, opts[opt]])

    for opt in ('private', 'assume_yes', 'print_diffs'):
        if opts.get(opt, False):
            args.append('--' + opt)

    args.extend(paths)

    upload_path = ui.config('review', 'uploadtool_path',
                            os.path.join('~', '.hgreview_upload.py'))
    upload_path = os.path.expanduser(upload_path)
    if not os.path.exists(upload_path):
        ui.status('Downloading {0} to {1}.\n'.format(UPLOADTOOL_URL,
                                                     upload_path))
        urllib.urlretrieve(UPLOADTOOL_URL, upload_path)

    # Find an available port for our local server
    issue = None

    class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
        def do_GET(self):
            self.send_response(200)
            self.send_header('Content-type', 'text/javascript')
            self.end_headers()
            self.wfile.write('location.href = "{0}";'.format(SERVER + '/' +
                                                             issue))

        def log_message(*args, **kwargs):
            pass

    server = None
    for port in range(54770, 54780):
        try:
            server = BaseHTTPServer.HTTPServer(('localhost', port),
                                               RequestHandler)
            break
        except socket.error:
            pass

    # Modify upload tool's auth response in order to redirect to the issue
    scope = {}
    execfile(upload_path, scope)
    if server:
        scope['AUTH_HANDLER_RESPONSE'] = '''\
<html>
  <head>
    <title>Authentication Status</title>
    <script>
    window.onload = function()
    {
      setInterval(function()
      {
        var script = document.createElement("script");
        script.src = "http://localhost:%s/?" + (new Date().getTime());
        document.body.appendChild(script);
      }, 1000)
    }
    </script>
  </head>
  <body>
    <p>
      The authentication flow has completed. This page will redirect to your
      review shortly.
    </p>
  </body>
</html>
''' % port

    # Run the upload tool
    issue, patchset = scope['RealMain']([upload_path] + args)

    # Wait for the page to check in and retrieve issue URL
    if server:
        server.handle_request()
Example no. 28
def unifyrepo(ui, settings):
    """Unify the contents of multiple source repositories using settings.

    The settings file is a Mercurial config file (basically an INI file).
    """
    conf = unifyconfig(settings)

    # Ensure destrepo is created with generaldelta enabled.
    ui.setconfig('format', 'usegeneraldelta', True)
    ui.setconfig('format', 'generaldelta', True)

    # Verify all source repos have the same revision 0
    rev0s = set()
    for source in conf.sources:
        repo = hg.repository(ui, path=source['path'])

        # Verify
        node = repo[0].node()
        if rev0s and node not in rev0s:
            ui.warn('repository has different rev 0: %s\n' % source['name'])

        rev0s.add(node)

    # Ensure the staging repo has all changesets from the source repos.

    stageui = ui.copy()

    # Enable aggressive merge deltas on the stage repo to minimize manifest delta
    # size. This could make delta chains very long. So we may want to institute a
    # delta chain cap on the destination repo. But this will ensure the stage repo
    # has the most efficient/compact representation of deltas. Pulling from this
    # repo will also inherit the optimal delta, so we don't need to enable
    # aggressivemergedeltas on the destination repo.
    stageui.setconfig('format', 'aggressivemergedeltas', True)

    stagerepo = hg.repository(stageui,
                              path=conf.stagepath,
                              create=not os.path.exists(conf.stagepath))

    for source in conf.sources:
        path = source['path']
        sourcepeer = hg.peer(ui, {}, path)
        ui.write('pulling %s into %s\n' % (path, conf.stagepath))
        exchange.pull(stagerepo, sourcepeer)

    # Now collect all the changeset data with pushlog info.
    # node -> (when, source, rev, who, pushid)
    nodepushinfo = {}
    pushcount = 0
    allnodes = set()

    # Obtain pushlog data from each source repo. We obtain data for every node
    # and filter later because we want to be sure we have the earliest known
    # push data for a given node.
    for source in conf.sources:
        sourcerepo = hg.repository(ui, path=source['path'])
        pushlog = getattr(sourcerepo, 'pushlog', None)
        if not pushlog:
            raise error.Abort('pushlog API not available',
                              hint='is the pushlog extension loaded?')

        index = sourcerepo.changelog.index
        revnode = {}
        for rev in sourcerepo:
            # revlog.node() is too slow. Use the index directly.
            node = index[rev][7]
            revnode[rev] = node
            allnodes.add(node)

        noderev = {v: k for k, v in revnode.iteritems()}

        localpushcount = 0
        pushnodecount = 0
        for pushid, who, when, nodes in pushlog.pushes():
            pushcount += 1
            localpushcount += 1
            for node in nodes:
                pushnodecount += 1
                bnode = bin(node)

                # There is a race between us iterating the repo and querying the
                # pushlog. A new changeset could be written between when we
                # obtain nodes and encounter the pushlog. So ignore pushlog
                # for nodes we don't know about.
                if bnode not in noderev:
                    ui.warn('pushlog entry for unknown node: %s; '
                            'possible race condition?\n' % node)
                    continue

                rev = noderev[bnode]

                if bnode not in nodepushinfo:
                    nodepushinfo[bnode] = (when, path, rev, who, pushid)
                else:
                    currentwhen = nodepushinfo[bnode][0]
                    if when < currentwhen:
                        nodepushinfo[bnode] = (when, path, rev, who, pushid)

        ui.write(
            'obtained pushlog info for %d/%d revisions from %d pushes from %s\n'
            % (pushnodecount, len(revnode), localpushcount, source['name']))

    # Now verify that every node in the source repos has pushlog data.
    missingpl = allnodes - set(nodepushinfo.keys())
    if missingpl:
        raise error.Abort('missing pushlog info for %d nodes' %
                          len(missingpl))

    # Filter out changesets we aren't aggregating.
    # We also use this pass to identify which nodes to bookmark.
    books = {}
    sourcenodes = set()
    for source in conf.sources:
        sourcerepo = hg.repository(ui, path=source['path'])
        cl = sourcerepo.changelog
        index = cl.index

        sourcerevs = sourcerepo.revs(source['pullrevs'])
        sourcerevs.sort()
        headrevs = set(cl.headrevs())
        sourceheadrevs = headrevs & set(sourcerevs)

        # We /could/ allow multiple heads from each source repo. But for now
        # it is easier to limit to 1 head per source.
        if len(sourceheadrevs) > 1:
            raise error.Abort(
                '%s has %d heads' % (source['name'], len(sourceheadrevs)),
                hint='define pullrevs to limit what is aggregated')

        for rev in cl:
            if rev not in sourcerevs:
                continue

            node = index[rev][7]
            sourcenodes.add(node)
            if source['bookmark']:
                books[source['bookmark']] = node

        ui.write(
            'aggregating %d/%d revisions for %d heads from %s\n' %
            (len(sourcerevs), len(cl), len(sourceheadrevs), source['name']))

    nodepushinfo = {
        k: v
        for k, v in nodepushinfo.iteritems() if k in sourcenodes
    }

    ui.write('aggregating %d/%d nodes from %d original pushes\n' %
             (len(nodepushinfo), len(allnodes), pushcount))

    # We now have accounting for every changeset. Because pulling changesets
    # is a bit time consuming, it is worthwhile to minimize the number of pull
    # operations. We do this by ordering all changesets by original push time
    # then emitting the minimum number of "fast forward" nodes from the tip
    # of each linear range inside that list.

    # (time, source, rev, user, pushid) -> node
    inversenodeinfo = {v: k for k, v in nodepushinfo.iteritems()}

    destui = ui.copy()
    destui.setconfig('format', 'aggressivemergedeltas', True)
    destui.setconfig('format', 'maxchainlen', 10000)

    destrepo = hg.repository(destui,
                             path=conf.destpath,
                             create=not os.path.exists(conf.destpath))
    destcl = destrepo.changelog
    pullpushinfo = {
        k: v
        for k, v in inversenodeinfo.iteritems() if not destcl.hasnode(v)
    }

    ui.write('%d/%d nodes will be pulled\n' %
             (len(pullpushinfo), len(inversenodeinfo)))

    pullnodes = list(emitfastforwardnodes(stagerepo, pullpushinfo))
    unifiedpushes = list(unifypushes(inversenodeinfo))

    ui.write('consolidated into %d pulls from %d unique pushes\n' %
             (len(pullnodes), len(unifiedpushes)))

    if not pullnodes:
        ui.write('nothing to do; exiting\n')
        return

    stagepeer = hg.peer(ui, {}, conf.stagepath)

    for node in pullnodes:
        # TODO Bug 1265002 - we should update bookmarks when we pull.
        # Otherwise the changesets will get replicated without a bookmark
        # and any poor soul who pulls will see a nameless head.
        exchange.pull(destrepo, stagepeer, heads=[node])
        # For some reason there is a massive memory leak (10+ MB per
        # iteration on Firefox repos) if we don't gc here.
        gc.collect()

    # Now that we've aggregated all the changesets in the destination repo,
    # define the pushlog entries.
    pushlog = getattr(destrepo, 'pushlog', None)
    if not pushlog:
        raise error.Abort('pushlog API not available',
                          hint='is the pushlog extension loaded?')

    with destrepo.lock():
        with destrepo.transaction('pushlog') as tr:
            insertpushes = list(newpushes(destrepo, unifiedpushes))
            ui.write('inserting %d pushlog entries\n' % len(insertpushes))
            pushlog.recordpushes(insertpushes, tr=tr)

    # Verify that pushlog time in revision order is always increasing.
    destnodepushtime = {}
    for push in destrepo.pushlog.pushes():
        for node in push.nodes:
            destnodepushtime[bin(node)] = push.when

    destcl = destrepo.changelog
    lastpushtime = 0
    for rev in destrepo:
        node = destcl.node(rev)
        pushtime = destnodepushtime[node]

        if pushtime < lastpushtime:
            ui.warn('push time for %d is older than %d\n' % (rev, rev - 1))

        lastpushtime = pushtime

    # Write bookmarks.
    ui.write('writing %d bookmarks\n' % len(books))

    with destrepo.lock():
        with destrepo.transaction('bookmarks') as tr:
            bm = bookmarks.bmstore(destrepo)
            # Mass replacing may not be the proper strategy. But it works for
            # our current use case.
            bm.clear()
            bm.update(books)
            bm.recordchange(tr)

    # This is a bit hacky. Pushlog and bookmarks aren't currently replicated
    # via the normal hooks mechanism because we use the low-level APIs to
    # write them. So, we send a replication message to sync the entire repo.
    try:
        vcsr = extensions.find('vcsreplicator')
    except KeyError:
        raise error.Abort(
            'vcsreplicator extension not installed; '
            'pushlog and bookmarks may not be replicated properly')

    vcsr.replicatecommand(destrepo.ui, destrepo)
Example No. 29
    def applyone(self,
                 repo,
                 node,
                 cl,
                 patchfile,
                 merge=False,
                 log=False,
                 filter=None):
        '''apply the patch in patchfile to the repository as a transplant'''
        (manifest, user, (time, timezone), files, message) = cl[:5]
        date = "%d %d" % (time, timezone)
        extra = {'transplant_source': node}
        if filter:
            (user, date, message) = self.filter(filter, node, cl, patchfile)

        if log:
            # we don't translate messages inserted into commits
            message += '\n(transplanted from %s)' % nodemod.hex(node)

        self.ui.status(_('applying %s\n') % nodemod.short(node))
        self.ui.note('%s %s\n%s\n' % (user, date, message))

        if not patchfile and not merge:
            raise error.Abort(_('can only omit patchfile if merging'))
        if patchfile:
            try:
                files = set()
                patch.patch(self.ui,
                            repo,
                            patchfile,
                            files=files,
                            eolmode=None)
                files = list(files)
            except Exception as inst:
                seriespath = os.path.join(self.path, 'series')
                if os.path.exists(seriespath):
                    os.unlink(seriespath)
                p1 = repo.dirstate.p1()
                p2 = node
                self.log(user, date, message, p1, p2, merge=merge)
                self.ui.write(stringutil.forcebytestr(inst) + '\n')
                raise TransplantError(
                    _('fix up the working directory and run '
                      'hg transplant --continue'))
        else:
            files = None
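        # For a merge transplant the transplanted node becomes the second
        # parent and everything is committed; otherwise only the files
        # touched by the patch are committed.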
        if merge:
            p1 = repo.dirstate.p1()
            repo.setparents(p1, node)
            m = match.always()
        else:
            m = match.exact(files)

        n = repo.commit(message,
                        user,
                        date,
                        extra=extra,
                        match=m,
                        editor=self.getcommiteditor())
        if not n:
            self.ui.warn(
                _('skipping emptied changeset %s\n') % nodemod.short(node))
            return None
        if not merge:
            self.transplants.set(n, node)

        return n
Example No. 30
    def prefetch(self,
                 fileids,
                 force=False,
                 fetchdata=True,
                 fetchhistory=False):
        """downloads the given file versions to the cache
        """
        repo = self.repo
        idstocheck = []
        for file, id in fileids:
            # hack
            # - we don't use .hgtags
            # - workingctx produces ids with length 42,
            #   which we skip since they aren't in any cache
            if (file == '.hgtags' or len(id) == 42
                    or not repo.shallowmatch(file)):
                continue

            idstocheck.append((file, bin(id)))

        datastore = self.datastore
        historystore = self.historystore
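        # With force=True, compute missingness against the shared stores
        # only, so content that exists just in the local caches is reported
        # missing and fetched again.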
        if force:
            datastore = contentstore.unioncontentstore(*repo.shareddatastores)
            historystore = metadatastore.unionmetadatastore(
                *repo.sharedhistorystores)

        missingids = set()
        if fetchdata:
            missingids.update(datastore.getmissing(idstocheck))
        if fetchhistory:
            missingids.update(historystore.getmissing(idstocheck))

        # Partition the missing nodes into nullid and non-nullid entries so
        # we can warn that filtering out the null ones may be hiding bugs.
        nullids = len([None for unused, id in missingids if id == nullid])
        if nullids:
            missingids = [(f, id) for f, id in missingids if id != nullid]
            repo.ui.develwarn(('remotefilelog not fetching %d null revs'
                               ' - this is likely hiding bugs' % nullids),
                              config='remotefilelog-ext')
        if missingids:
            global fetches, fetched, fetchcost
            fetches += 1

            # We want to be able to detect excess individual file downloads,
            # so log stack traces for a small window of fetches (the 15th
            # through the 17th) to aid debugging without flooding the output.
            if 15 <= fetches < 18:
                if fetches == 15:
                    fetchwarning = self.ui.config('remotefilelog',
                                                  'fetchwarning')
                    if fetchwarning:
                        self.ui.warn(fetchwarning + '\n')
                self.logstacktrace()
            missingids = [(file, hex(id)) for file, id in sorted(missingids)]
            fetched += len(missingids)
            start = time.time()
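            # request() performs the actual network fetch; any ids it
            # returns could not be retrieved.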
            missingids = self.request(missingids)
            if missingids:
                raise error.Abort(
                    _("unable to download %d files") % len(missingids))
            fetchcost += time.time() - start
            self._lfsprefetch(fileids)