Example #1
def unshelveabort(ui, repo, state, opts):
    """subcommand that abort an in-progress unshelve"""
    wlock = repo.wlock()
    lock = None
    try:
        checkparents(repo, state)
        lock = repo.lock()
        merge.mergestate(repo).reset()
        if opts['keep']:
            repo.setparents(repo.dirstate.parents()[0])
        else:
            revertfiles = readshelvedfiles(repo, state.name)
            wctx = repo.parents()[0]
            cmdutil.revert(ui, repo, wctx, [wctx.node(), nullid],
                           *revertfiles, **{'no_backup': True})
            # fix up the weird dirstate states the merge left behind
            mf = wctx.manifest()
            dirstate = repo.dirstate
            for f in revertfiles:
                if f in mf:
                    dirstate.normallookup(f)
                else:
                    dirstate.drop(f)
            dirstate._pl = (wctx.node(), nullid)
            dirstate._dirty = True
        repair.strip(ui, repo, state.stripnodes, backup='none', topic='shelve')
        shelvedstate.clear(repo)
        ui.warn(_("unshelve of '%s' aborted\n") % state.name)
    finally:
        lockmod.release(lock, wlock)
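
The example above follows Mercurial's usual lock discipline: take the working-directory lock before the store lock and release both in a finally block so an exception can never leave the repository locked. A minimal sketch of that pattern, assuming mercurial.lock is imported as lockmod (as the example's lockmod.release call suggests); operation is a hypothetical callable standing in for the real work:

from mercurial import lock as lockmod

def withlocks(repo, operation):
    # Acquire the working-directory lock first, then the store lock.
    wlock = repo.wlock()
    lock = None
    try:
        lock = repo.lock()
        return operation()
    finally:
        # release() skips locks that are None, so a failure between wlock()
        # and lock() still releases whatever was actually acquired.
        lockmod.release(lock, wlock)
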
Example #2
def mergefiles(ui, repo, wctx, shelvectx):
    """updates to wctx and merges the changes from shelvectx into the
    dirstate."""
    with ui.configoverride({('ui', 'quiet'): True}):
        hg.update(repo, wctx.node())
        files = []
        files.extend(shelvectx.files())
        files.extend(shelvectx.parents()[0].files())

        # revert will overwrite unknown files, so move them out of the way
        for file in repo.status(unknown=True).unknown:
            if file in files:
                util.rename(file, scmutil.origpath(ui, repo, file))
        ui.pushbuffer(True)
        cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(),
                       *pathtofiles(repo, files), **{r'no_backup': True})
        ui.popbuffer()
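
The cmdutil.revert call above is silenced twice: a ('ui', 'quiet') config override plus a pushbuffer/popbuffer pair that swallows the per-file "reverting ..." messages. A minimal sketch of that pattern, assuming the cmdutil.revert(ui, repo, ctx, parents, *files, **opts) call style used throughout these examples; quietrevert is a hypothetical helper name:

from mercurial import cmdutil

def quietrevert(ui, repo, ctx, files):
    with ui.configoverride({('ui', 'quiet'): True}):
        ui.pushbuffer(True)  # capture output, including error output
        try:
            cmdutil.revert(ui, repo, ctx, repo.dirstate.parents(),
                           *files, **{'no_backup': True})
        finally:
            ui.popbuffer()   # discard the buffered "reverting ..." lines
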
Example #3
def applychanges(ui, repo, ctx, opts):
    """Merge changeset from ctx (only) in the current working directory"""
    wcpar = repo.dirstate.parents()[0]
    if ctx.p1().node() == wcpar:
        # edition ar "in place" we do not need to make any merge,
        # just applies changes on parent for edition
        cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
        stats = None
    else:
        try:
            # ui.forcemerge is an internal variable, do not document
            repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                              'histedit')
            stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'histedit'])
        finally:
            repo.ui.setconfig('ui', 'forcemerge', '', 'histedit')
    return stats
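
The else branch above temporarily forces a merge tool through the internal ui.forcemerge knob and clears it in a finally block. A sketch of that pattern, assuming the four-argument ui.setconfig(section, name, value, source) form shown in the example; runmerge is a hypothetical callable standing in for the graft or update call:

def withforcemerge(repo, tool, runmerge, source='histedit'):
    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', tool or '', source)
        return runmerge()
    finally:
        # Always clear the override, even if the merge raised.
        repo.ui.setconfig('ui', 'forcemerge', '', source)
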
Example #5
def dosplit(ui, repo, tr, ctx, opts):
    committed = []  # [ctx]

    # Set working parent to ctx.p1(), and keep working copy as ctx's content
    # NOTE: if we can have "update without touching working copy" API, the
    # revert step could be cheaper.
    hg.clean(repo, ctx.p1().node(), show_stats=False)
    parents = repo.changelog.parents(ctx.node())
    ui.pushbuffer()
    cmdutil.revert(ui, repo, ctx, parents)
    ui.popbuffer()  # discard "reverting ..." messages

    # Any modified, added, removed, deleted result means split is incomplete
    incomplete = lambda repo: any(repo.status()[:4])

    # Main split loop
    while incomplete(repo):
        if committed:
            header = (_('HG: Splitting %s. So far it has been split into:\n') %
                      short(ctx.node()))
            for c in committed:
                firstline = c.description().split('\n', 1)[0]
                header += _('HG: - %s: %s\n') % (short(c.node()), firstline)
            header += _('HG: Write commit message for the next split '
                        'changeset.\n')
        else:
            header = _('HG: Splitting %s. Write commit message for the '
                       'first split changeset.\n') % short(ctx.node())
        opts.update({
            'edit': True,
            'interactive': True,
            'message': header + ctx.description(),
        })
        commands.commit(ui, repo, **pycompat.strkwargs(opts))
        newctx = repo['.']
        committed.append(newctx)

    if not committed:
        raise error.Abort(_('cannot split an empty revision'))

    scmutil.cleanupnodes(repo, {ctx.node(): [c.node() for c in committed]},
                         operation='split',
                         fixphase=True)

    return committed[-1]
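
The split loop above keeps prompting for commits while the working copy still differs from its parent; the incomplete lambda just checks the first four fields of repo.status(). The same check with the field names spelled out, as a small sketch:

def splitincomplete(repo):
    # repo.status() yields modified, added, removed, deleted, unknown,
    # ignored, clean; any of the first four means more hunks remain.
    modified, added, removed, deleted = repo.status()[:4]
    return any((modified, added, removed, deleted))
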
Example #6
def mergefiles(ui, repo, wctx, shelvectx):
    """updates to wctx and merges the changes from shelvectx into the
    dirstate."""
    with ui.configoverride({('ui', 'quiet'): True}):
        hg.update(repo, wctx.node())
        files = []
        files.extend(shelvectx.files())
        files.extend(shelvectx.parents()[0].files())

        # revert will overwrite unknown files, so move them out of the way
        for file in repo.status(unknown=True).unknown:
            if file in files:
                util.rename(file, scmutil.origpath(ui, repo, file))
        ui.pushbuffer(True)
        cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(),
                       *pathtofiles(repo, files),
                       **{'no_backup': True})
        ui.popbuffer()
Example #7
def applychanges(ui, repo, ctx, opts):
    """Merge changeset from ctx (only) in the current working directory"""
    wcpar = repo.dirstate.parents()[0]
    if ctx.p1().node() == wcpar:
        # the edit is "in place": we do not need to perform any merge,
        # just apply ctx's changes on top of its parent for editing
        cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
        stats = None
    else:
        try:
            # ui.forcemerge is an internal variable, do not document
            repo.ui.setconfig("ui", "forcemerge", opts.get("tool", ""))
            stats = mergemod.update(repo, ctx.node(), True, True, False, ctx.p1().node())
        finally:
            repo.ui.setconfig("ui", "forcemerge", "")
        repo.setparents(wcpar, node.nullid)
        repo.dirstate.write()
        # fix up dirstate for copies and renames
    cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
    return stats
Example #8
def mergefiles(ui, repo, wctx, shelvectx):
    """updates to wctx and merges the changes from shelvectx into the
    dirstate."""
    oldquiet = ui.quiet
    try:
        ui.quiet = True
        hg.update(repo, wctx.node())
        files = []
        files.extend(shelvectx.files())
        files.extend(shelvectx.parents()[0].files())

        # revert will overwrite unknown files, so move them out of the way
        m, a, r, d, u = repo.status(unknown=True)[:5]
        for file in u:
            if file in files:
                util.rename(file, file + ".orig")
        cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(),
                       *pathtofiles(repo, files), **{'no_backup': True})
    finally:
        ui.quiet = oldquiet
Example #9
def mergefiles(ui, repo, wctx, shelvectx, unknownfiles):
    """updates to wctx and merges the changes from shelvectx into the
    dirstate. drops any files in unknownfiles from the dirstate."""
    oldquiet = ui.quiet
    try:
        ui.quiet = True
        hg.update(repo, wctx.node())
        files = []
        files.extend(shelvectx.files())
        files.extend(shelvectx.parents()[0].files())
        cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(),
                       *pathtofiles(repo, files),
                       **{'no_backup': True})
    finally:
        ui.quiet = oldquiet

    # Send untracked files back to being untracked
    dirstate = repo.dirstate
    for f in unknownfiles:
        dirstate.drop(f)
Example #10
def mergefiles(ui, repo, wctx, shelvectx):
    """updates to wctx and merges the changes from shelvectx into the
    dirstate."""
    oldquiet = ui.quiet
    try:
        ui.quiet = True
        hg.update(repo, wctx.node())
        files = []
        files.extend(shelvectx.files())
        files.extend(shelvectx.parents()[0].files())

        # revert will overwrite unknown files, so move them out of the way
        m, a, r, d, u = repo.status(unknown=True)[:5]
        for file in u:
            if file in files:
                util.rename(file, file + ".orig")
        cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(),
                       *pathtofiles(repo, files),
                       **{'no_backup': True})
    finally:
        ui.quiet = oldquiet
Example #11
def applychanges(ui, repo, ctx, opts):
    """Merge changeset from ctx (only) in the current working directory"""
    wcpar = repo.dirstate.parents()[0]
    if ctx.p1().node() == wcpar:
        # the edit is "in place": we do not need to perform any merge,
        # just apply ctx's changes on top of its parent for editing
        cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
        stats = None
    else:
        try:
            # ui.forcemerge is an internal variable, do not document
            repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
            stats = mergemod.update(repo, ctx.node(), True, True, False,
                                    ctx.p1().node())
        finally:
            repo.ui.setconfig('ui', 'forcemerge', '')
        repo.setparents(wcpar, node.nullid)
        repo.dirstate.write()
        # fix up dirstate for copies and renames
    cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
    return stats
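
After the merge-based apply, the example pretends no merge happened: the original working-copy parent becomes the sole dirstate parent and any copies or renames introduced by ctx are re-recorded. A sketch of that clean-up, assuming repo.setparents, dirstate.write and cmdutil.duplicatecopies behave as in the example:

from mercurial import cmdutil, node

def resetafterapply(repo, ctx, wcpar):
    # Drop the second parent so the working copy no longer looks merged.
    repo.setparents(wcpar, node.nullid)
    repo.dirstate.write()
    # Re-record copies/renames that ctx made relative to its parent.
    cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
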
Example #12
def dorecord(ui, repo, commitfunc, *pats, **opts):
    try:
        if not ui.interactive():
            raise util.Abort(
                _('running non-interactively, use commit instead'))
    except TypeError:  # backwards compatibility with hg 1.1
        if not ui.interactive:
            raise util.Abort(
                _('running non-interactively, use commit instead'))

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and accordingly
        prepare working dir into a state, where the job can be delegated to
        non-interactive commit command such as 'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, working dir
        state is restored to original.

        In the end we'll record interesting changes, and everything else will be
        left in place, so the user can continue his work.
        """

        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(
                _('cannot partially commit a merge '
                  '(use hg commit instead)'))

        # status gives back
        #   modified, added, removed, deleted, unknown, ignored, clean
        # we take only the first 3 of these
        changes = repo.status(match=match)[:3]
        modified, added, removed = changes
        try:
            # Mercurial >= 3.3 allows disabling format-changing diffopts
            diffopts = patch.difffeatureopts(ui,
                                             opts=opts,
                                             section='crecord',
                                             whitespace=True)
        except AttributeError:
            diffopts = patch.diffopts(ui, opts=opts, section='crecord')
        diffopts.nodates = True
        diffopts.git = True
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter the patch, so we have the subset of it we intend to apply
        chunks = crpatch.filterpatch(opts, crpatch.parsepatch(changes, fp),
                                     chunk_selector.chunkselector, ui)
        del fp

        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = changes[0] + changes[1] + changes[2]
        newfiles = [f for f in changed if f in contenders]

        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        # 2. backup changed files, so we can restore them in the end
        backups = {}
        newly_added_backups = {}
        backupdir = repo.join('record-backups')
        try:
            os.mkdir(backupdir)
        except OSError, err:
            if err.errno != errno.EEXIST:
                raise
        try:
            # backup continues
            for f in newfiles:
                if f not in (modified + added):
                    continue
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') +
                                               '.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                if f in modified:
                    backups[f] = tmpname
                elif f in added:
                    newly_added_backups[f] = tmpname

            fp = cStringIO.StringIO()
            all_backups = {}
            all_backups.update(backups)
            all_backups.update(newly_added_backups)
            for c in chunks:
                if c.filename() in all_backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts['crecord_reviewpatch']:
                patchtext = fp.read()
                reviewedpatch = ui.edit(patchtext, "")
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                cmdutil.revert(ui, repo, repo['.'], repo.dirstate.parents(),
                               *backups)
            # remove newly added files from 'clean' repo (so patch can apply)
            for f in newly_added_backups:
                os.unlink(repo.wjoin(f))

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    if hasattr(patch, 'workingbackend'):  # detect 1.9
                        patch.internalpatch(ui,
                                            repo,
                                            fp,
                                            strip=1,
                                            eolmode=None)
                    else:
                        pfiles = {}
                        try:
                            patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                        except (TypeError, AttributeError):  # pre 17cea10c343e
                            try:
                                patch.internalpatch(ui,
                                                    repo,
                                                    fp,
                                                    1,
                                                    repo.root,
                                                    eolmode=None)
                            except (TypeError,
                                    AttributeError):  # pre 00a881581400
                                try:
                                    patch.internalpatch(fp,
                                                        ui,
                                                        1,
                                                        repo.root,
                                                        files=pfiles,
                                                        eolmode=None)
                                except TypeError:  # backwards compatible with hg 1.1
                                    patch.internalpatch(fp,
                                                        ui,
                                                        1,
                                                        repo.root,
                                                        files=pfiles)
                        try:
                            cmdutil.updatedir(ui, repo, pfiles)
                        except AttributeError:
                            try:
                                patch.updatedir(ui, repo, pfiles)
                            except AttributeError:
                                # from 00a881581400 onwards
                                pass
                except patch.PatchError, err:
                    s = str(err)
                    if s:
                        raise util.Abort(s)
                    else:
                        raise util.Abort(_('patch failed to apply'))
            del fp

            # 4. We prepared working directory according to filtered patch.
            #    Now is the time to delegate the job to commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call a
            # highlevel command with list of pathnames relative to repo root
            newfiles = [repo.wjoin(n) for n in newfiles]
            commitfunc(ui, repo, *newfiles, **opts)

            return 0
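
Step 2 above snapshots every selected modified or added file into .hg/record-backups before the working copy is reverted, so the original contents can be restored afterwards. A condensed sketch of that backup step, assuming util.copyfile copies a working file as in the example; backupfiles is a hypothetical helper name:

import os
import tempfile

from mercurial import util

def backupfiles(ui, repo, backupdir, files):
    backups = {}
    for f in files:
        # Flatten the path into the backup's file name so it stays readable.
        fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') + '.',
                                       dir=backupdir)
        os.close(fd)
        ui.debug('backup %r as %r\n' % (f, tmpname))
        util.copyfile(repo.wjoin(f), tmpname)
        backups[f] = tmpname
    return backups
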
Example #13
def shelve(ui, repo, *pats, **opts):
    '''interactively select changes to set aside

    If a list of files is omitted, all changes reported by :hg:`status`
    will be candidates for shelving.

    You will be prompted for whether to shelve changes to each
    modified file, and for files with multiple changes, for each
    change to use.

    The shelve command works with the Color extension to display
    diffs in color.

    On each prompt, the following responses are possible::

      y - shelve this change
      n - skip this change

      s - skip remaining changes to this file
      f - shelve remaining changes to this file

      d - done, skip remaining changes and files
      a - shelve all changes to all remaining files
      q - quit, shelving no changes

      ? - display help
    '''

    if not ui.interactive():
        raise util.Abort(_('shelve can only be run interactively'))

    # List all the active shelves by name and return
    if opts['list']:
        listshelves(ui, repo)
        return

    forced = opts['force'] or opts['append']

    # Shelf name and path
    shelfname = opts.get('name')
    shelfpath = getshelfpath(repo, shelfname)

    if os.path.exists(repo.join(shelfpath)) and not forced:
        raise util.Abort(_('shelve data already exists'))

    def shelvefunc(ui, repo, message, match, opts):
        parents = repo.dirstate.parents()
        changes = repo.status(match=match)[:3]
        modified, added, removed = changes
        diffopts = patch.diffopts(ui, opts={'git': True, 'nodates': True})
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO(''.join(chunks))

        try:
            ac = parsepatch(fp)
        except patch.PatchError, err:
            raise util.Abort(_('error parsing patch: %s') % err)

        del fp

        # 1. filter the patch, so we have the subset of it we intend to apply
        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        # set of files to be processed
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        # exclude sources of copies that are otherwise untouched
        changed = modified + added + removed
        newfiles = set(f for f in changed if f in contenders)
        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        # 2. backup changed files, so we can restore them in case of error
        backupdir = repo.join('shelve-backups')
        try:
            backups = makebackup(ui, repo, backupdir, newfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                c.write(sp)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                # skip files not selected for shelving
                if c.filename() in newfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo  (clean)
                opts['no_backup'] = True
                cmdutil.revert(ui, repo, repo['.'], parents,
                               *[os.path.join(repo.root, f) for f in newfiles],
                               **opts)
                for f in added:
                    if f in newfiles:
                        util.unlinkpath(repo.wjoin(f))

                # 3b. (apply)
                if dopatch:
                    try:
                        ui.debug('applying patch\n')
                        ui.debug(fp.getvalue())
                        patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                    except patch.PatchError, err:
                        raise util.Abort(str(err))
                del fp

                # 4. We prepared working directory according to filtered
                #    patch. Now is the time to save the shelved changes!
                ui.debug("saving patch to shelve\n")
                if opts['append']:
                    sp.write(repo.opener(shelfpath).read())
                sp.seek(0)
                f = repo.opener(shelfpath, "w")
                f.write(sp.getvalue())
                del f, sp
            except:
                ui.warn("shelving failed: %s\n" % sys.exc_info()[1])
                try:
                    # re-schedule remove
                    matchremoved = scmutil.matchfiles(repo, removed)
                    cmdutil.forget(ui, repo, matchremoved, "", True)
                    for f in removed:
                        if f in newfiles and os.path.isfile(f):
                            os.unlink(f)
                    # copy back backups
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                        # Our calls to copystat() here and above are a
                        # hack to trick any editors that have f open into
                        # thinking that we haven't modified them.
                        #
                        # Also note that this is racy, as an editor could
                        # notice the file's mtime before we've finished
                        # writing it.
                        shutil.copystat(tmpname, repo.wjoin(realname))
                    # re-schedule add
                    matchadded = scmutil.matchfiles(repo, added)
                    cmdutil.add(ui, repo, matchadded, False, False, "", True)

                    ui.debug('removing shelve file\n')
                    if os.path.isfile(repo.wjoin(shelfpath)):
                        os.unlink(repo.join(shelfpath))
                except OSError, err:
                    ui.warn("restoring backup failed: %s\n" % err)
Example #14
    def shelvefunc(ui, repo, message, match, opts):
        parents = repo.dirstate.parents()
        changes = repo.status(match=match)[:5]
        modified, added, removed = changes[:3]
        files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        patch_diff = "".join(patch.diff(repo, parents[0], match=match, changes=changes, opts=diffopts))

        fp = cStringIO.StringIO(patch_diff)
        ac = parsepatch(fp)
        fp.close()

        chunks = filterpatch(ui, ac, not opts["all"])
        rc = refilterpatch(ac, chunks)

        # set of files to be processed
        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        # exclude sources of copies that are otherwise untouched
        newfiles = set(f for f in files if f in contenders)

        if not newfiles:
            ui.status(_("no changes to shelve\n"))
            return 0

        backupdir = repo.join("shelve-backups")

        try:
            backups = makebackup(ui, repo, backupdir, newfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                c.write(sp)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                # skip files not selected for shelving
                if c.filename() in newfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                opts["no_backup"] = True
                cmdutil.revert(ui, repo, repo["."], parents, *[os.path.join(repo.root, f) for f in newfiles], **opts)
                for f in added:
                    if f in newfiles:
                        util.unlinkpath(repo.wjoin(f))

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug("applying patch\n")
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                ui.debug("saving patch to shelve\n")
                if opts["append"]:
                    sp.write(repo.opener(shelfpath).read())
                sp.seek(0)
                f = repo.opener(shelfpath, "w")
                f.write(sp.getvalue())
                del f, sp
            except:
                ui.warn("shelving failed: %s\n" % sys.exc_info()[1])
                try:
                    # re-schedule remove
                    matchremoved = scmutil.matchfiles(repo, removed)
                    cmdutil.forget(ui, repo, matchremoved, "", True)
                    for f in removed:
                        if f in newfiles and os.path.isfile(f):
                            os.unlink(f)
                    # copy back backups
                    for realname, tmpname in backups.iteritems():
                        ui.debug("restoring %r to %r\n" % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                    # re-schedule add
                    matchadded = scmutil.matchfiles(repo, added)
                    cmdutil.add(ui, repo, matchadded, False, False, "", True)

                    ui.debug("removing shelve file\n")
                    if os.path.isfile(repo.wjoin(shelfpath)):
                        os.unlink(repo.join(shelfpath))
                except OSError, err:
                    ui.warn("restoring backup failed: %s\n" % err)

            return 0
Example #15
        wlock = repo.wlock()

        if ancestor.node() != wctx.node():
            conflicts = hg.merge(repo, tip.node(), force=True, remind=False)
            ms = merge.mergestate(repo)
            stripnodes = [repo.changelog.node(rev)
                          for rev in xrange(oldtiprev, len(repo))]
            if conflicts:
                shelvedstate.save(repo, basename, stripnodes)
                # Fix up the dirstate entries of files from the second
                # parent as if we were not merging, except for those
                # with unresolved conflicts.
                parents = repo.parents()
                revertfiles = set(parents[1].files()).difference(ms)
                cmdutil.revert(ui, repo, parents[1],
                               (parents[0].node(), nullid),
                               *revertfiles, **{'no_backup': True})
                raise error.InterventionRequired(
                    _("unresolved conflicts (see 'hg resolve', then "
                      "'hg unshelve --continue')"))
            finishmerge(ui, repo, ms, stripnodes, basename, opts)
        else:
            parent = tip.parents()[0]
            hg.update(repo, parent.node())
            cmdutil.revert(ui, repo, tip, repo.dirstate.parents(), *tip.files(),
                           **{'no_backup': True})

        prevquiet = ui.quiet
        ui.quiet = True
        try:
            repo.rollback(force=True)
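
When the unshelve merge hits conflicts, the fragment above reverts every file contributed by the second parent except those still listed as unresolved in the merge state, so only the conflicted files keep their merge markers. A sketch of that selection, assuming (as the example does) that iterating a mergestate yields the files it tracks and that the old merge.mergestate(repo) constructor is available:

from mercurial import cmdutil, merge
from mercurial.node import nullid

def revertunconflicted(ui, repo):
    ms = merge.mergestate(repo)
    parents = repo.parents()
    # Files from the merged-in parent that have no unresolved conflict.
    revertfiles = set(parents[1].files()).difference(ms)
    cmdutil.revert(ui, repo, parents[1], (parents[0].node(), nullid),
                   *revertfiles, **{'no_backup': True})
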
Example #16
def dorecord(ui, repo, commitfunc, *pats, **opts):
    try:
        if not ui.interactive():
            raise util.Abort(_('running non-interactively, use commit instead'))
    except TypeError: # backwards compatibility with hg 1.1
        if not ui.interactive:
            raise util.Abort(_('running non-interactively, use commit instead'))

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and accordingly
        prepare working dir into a state, where the job can be delegated to
        non-interactive commit command such as 'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, working dir
        state is restored to original.

        In the end we'll record interesting changes, and everything else will be
        left in place, so the user can continue his work.
        """

        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(_('cannot partially commit a merge '
                               '(use hg commit instead)'))

        # status gives back
        #   modified, added, removed, deleted, unknown, ignored, clean
        # we take only the first 3 of these
        changes = repo.status(match=match)[:3]
        modified, added, removed = changes
        try:
            # Mercurial >= 3.3 allows disabling format-changing diffopts
            diffopts = patch.difffeatureopts(ui, opts=opts, section='crecord',
                                             whitespace=True)
        except AttributeError:
            diffopts = patch.diffopts(ui, opts=opts, section='crecord')
        diffopts.nodates = True
        diffopts.git = True
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter the patch, so we have the subset of it we intend to apply
        chunks = crpatch.filterpatch(opts,
                                     crpatch.parsepatch(changes, fp),
                                     chunk_selector.chunkselector, ui)
        del fp

        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = changes[0] + changes[1] + changes[2]
        newfiles = [f for f in changed if f in contenders]

        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0


        # 2. backup changed files, so we can restore them in the end
        backups = {}
        newly_added_backups = {}
        backupdir = repo.join('record-backups')
        try:
            os.mkdir(backupdir)
        except OSError, err:
            if err.errno != errno.EEXIST:
                raise
        try:
            # backup continues
            for f in newfiles:
                if f not in (modified + added):
                    continue
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                if f in modified:
                    backups[f] = tmpname
                elif f in added:
                    newly_added_backups[f] = tmpname

            fp = cStringIO.StringIO()
            all_backups = {}
            all_backups.update(backups)
            all_backups.update(newly_added_backups)
            for c in chunks:
                if c.filename() in all_backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts['crecord_reviewpatch']:
                patchtext = fp.read()
                reviewedpatch = ui.edit(patchtext, "")
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                cmdutil.revert(ui, repo, repo['.'], repo.dirstate.parents(),
                               *backups)
            # remove newly added files from 'clean' repo (so patch can apply)
            for f in newly_added_backups:
                os.unlink(repo.wjoin(f))

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    if hasattr(patch, 'workingbackend'): # detect 1.9
                        patch.internalpatch(ui, repo, fp, strip=1, eolmode=None)
                    else:
                        pfiles = {}
                        try:
                            patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                        except (TypeError, AttributeError): # pre 17cea10c343e
                            try:
                                patch.internalpatch(ui, repo, fp, 1, repo.root,
                                                    eolmode=None)
                            except (TypeError, AttributeError): # pre 00a881581400
                                try:
                                    patch.internalpatch(fp, ui, 1, repo.root,
                                                        files=pfiles, eolmode=None)
                                except TypeError: # backwards compatible with hg 1.1
                                    patch.internalpatch(fp, ui, 1,
                                                        repo.root, files=pfiles)
                        try:
                            cmdutil.updatedir(ui, repo, pfiles)
                        except AttributeError:
                            try:
                                patch.updatedir(ui, repo, pfiles)
                            except AttributeError:
                                # from 00a881581400 onwards
                                pass
                except patch.PatchError, err:
                    s = str(err)
                    if s:
                        raise util.Abort(s)
                    else:
                        raise util.Abort(_('patch failed to apply'))
            del fp

            # 4. We prepared working directory according to filtered patch.
            #    Now is the time to delegate the job to commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call a
            # highlevel command with list of pathnames relative to repo root
            newfiles = [repo.wjoin(n) for n in newfiles]
            commitfunc(ui, repo, *newfiles, **opts)

            return 0
Example #17
    def shelvefunc(ui, repo, message, match, opts):
        parents = repo.dirstate.parents()
        changes = repo.status(match=match)[:5]
        modified, added, removed = changes[:3]
        files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        patch_diff = ''.join(
            patch.diff(repo,
                       parents[0],
                       match=match,
                       changes=changes,
                       opts=diffopts))

        fp = cStringIO.StringIO(patch_diff)
        ac = parsepatch(fp)
        fp.close()

        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        # set of files to be processed
        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        # exclude sources of copies that are otherwise untouched
        newfiles = set(f for f in files if f in contenders)

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        backupdir = repo.join('shelve-backups')

        try:
            backups = makebackup(ui, repo, backupdir, newfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                c.write(sp)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                # skip files not selected for shelving
                if c.filename() in newfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                opts['no_backup'] = True
                cmdutil.revert(ui, repo, repo['.'], parents,
                               *[os.path.join(repo.root, f) for f in newfiles],
                               **opts)
                for f in added:
                    if f in newfiles:
                        util.unlinkpath(repo.wjoin(f))

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                ui.debug("saving patch to shelve\n")
                if opts['append']:
                    sp.write(repo.opener(shelfpath).read())
                sp.seek(0)
                f = repo.opener(shelfpath, "w")
                f.write(sp.getvalue())
                del f, sp
            except:
                ui.warn("shelving failed: %s\n" % sys.exc_info()[1])
                try:
                    # re-schedule remove
                    matchremoved = scmutil.matchfiles(repo, removed)
                    cmdutil.forget(ui, repo, matchremoved, "", True)
                    for f in removed:
                        if f in newfiles and os.path.isfile(f):
                            os.unlink(f)
                    # copy back backups
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                    # re-schedule add
                    matchadded = scmutil.matchfiles(repo, added)
                    cmdutil.add(ui, repo, matchadded, False, False, "", True)

                    ui.debug('removing shelve file\n')
                    if os.path.isfile(repo.wjoin(shelfpath)):
                        os.unlink(repo.join(shelfpath))
                except OSError, err:
                    ui.warn("restoring backup failed: %s\n" % err)

            return 0
Example #18
def shelve(ui, repo, *pats, **opts):
    '''interactively select changes to set aside

    If a list of files is omitted, all changes reported by :hg:`status`
    will be candidates for shelving.

    You will be prompted for whether to shelve changes to each
    modified file, and for files with multiple changes, for each
    change to use.

    The shelve command works with the Color extension to display
    diffs in color.

    On each prompt, the following responses are possible::

      y - shelve this change
      n - skip this change

      s - skip remaining changes to this file
      f - shelve remaining changes to this file

      d - done, skip remaining changes and files
      a - shelve all changes to all remaining files
      q - quit, shelving no changes

      ? - display help
    '''

    if not ui.interactive() and not (opts['all'] or opts['list']):
        raise util.Abort(_('shelve can only be run interactively'))

    # List all the active shelves by name and return
    if opts['list']:
        listshelves(ui, repo)
        return

    forced = opts['force'] or opts['append']

    # Shelf name and path
    shelfname = opts.get('name')
    shelfpath = getshelfpath(repo, shelfname)

    if os.path.exists(repo.join(shelfpath)) and not forced:
        raise util.Abort(_('shelve data already exists'))

    def shelvefunc(ui, repo, message, match, opts):
        parents = repo.dirstate.parents()
        changes = repo.status(match=match)[:3]
        modified, added, removed = changes
        diffopts = patch.diffopts(ui, opts={'git': True, 'nodates': True})
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO(''.join(chunks))

        try:
            ac = parsepatch(fp)
        except patch.PatchError, err:
            raise util.Abort(_('error parsing patch: %s') % err)

        del fp

        # 1. filter the patch, so we have the subset of it we intend to apply
        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        # set of files to be processed
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        # exclude sources of copies that are otherwise untouched
        changed = modified + added + removed
        newfiles = set(f for f in changed if f in contenders)
        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        # 2. backup changed files, so we can restore them in case of error
        backupdir = repo.join('shelve-backups')
        try:
            backups = makebackup(ui, repo, backupdir, newfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                c.write(sp)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                # skip files not selected for shelving
                if c.filename() in newfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo  (clean)
                opts['no_backup'] = True
                cmdutil.revert(ui, repo, repo['.'], parents,
                               *[repo.wjoin(f) for f in newfiles], **opts)
                for f in added:
                    if f in newfiles:
                        util.unlinkpath(repo.wjoin(f))

                # 3b. (apply)
                if dopatch:
                    try:
                        ui.debug('applying patch\n')
                        ui.debug(fp.getvalue())
                        patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                    except patch.PatchError, err:
                        raise util.Abort(str(err))
                del fp

                # 4. We prepared working directory according to filtered
                #    patch. Now is the time to save the shelved changes!
                ui.debug("saving patch to shelve\n")
                if opts['append']:
                    sp.write(repo.opener(shelfpath).read())
                sp.seek(0)
                f = repo.opener(shelfpath, "w")
                f.write(sp.getvalue())
                del f, sp
            except:
                ui.warn("shelving failed: %s\n" % sys.exc_info()[1])
                try:
                    # re-schedule remove
                    matchremoved = scmutil.matchfiles(repo, removed)
                    cmdutil.forget(ui, repo, matchremoved, "", True)
                    for f in removed:
                        if f in newfiles and os.path.isfile(repo.wjoin(f)):
                            os.unlink(repo.wjoin(f))
                    # copy back backups
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                        # Our calls to copystat() here and above are a
                        # hack to trick any editors that have f open into
                        # thinking that we haven't modified them.
                        #
                        # Also note that this is racy, as an editor could
                        # notice the file's mtime before we've finished
                        # writing it.
                        shutil.copystat(tmpname, repo.wjoin(realname))
                    # re-schedule add
                    matchadded = scmutil.matchfiles(repo, added)
                    cmdutil.add(ui, repo, matchadded, False, False, "", True)

                    ui.debug('removing shelve file\n')
                    if os.path.isfile(repo.join(shelfpath)):
                        os.unlink(repo.join(shelfpath))
                except OSError, err:
                    ui.warn("restoring backup failed: %s\n" % err)