Example #1
0
def squash_revisions(ui, repo, revs, start, end):
    """
    Squash the given revisions together. It does so by:
    1) updating to the parent of the start revision
    2) reverting the working directory to the state of the end revision
    3) committing the changes, which creates a new head
    4) stripping the old revisions from start onwards

    Args:
        ui - the ui object
        repo - the current repository
        revs - the set of revisions
        start - the start rev
        end - the end rev
    """
    start_node = repo[start].parents()[0].node()
    end_node = repo[end].node()
    # step 1
    hg.update(repo, start_node)
    # step 2
    hg.revert(repo, end_node, lambda f: True)  # revert every file to the end revision's state
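A hedged sketch of how the remaining steps 3 and 4 from the docstring might continue; the commit message and the mercurial.repair helper are assumptions here, not part of the original snippet:

    # step 3: commit the reverted working directory, creating the squashed head
    # (message is assumed to have been assembled from the squashed changesets)
    repo.commit(text=message)
    # step 4: strip the original revisions, beginning at the old start rev
    # (repair is assumed to be mercurial.repair)
    repair.strip(ui, repo, repo[start].node())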
Example #2
0
                os.close(fd)
                ui.debug("backup %r as %r\n" % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                shutil.copystat(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.p1(), lambda key: key in backups)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug("applying patch\n")
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError, err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!
Example #3
0
    def shelvefunc(ui, repo, message, match, opts):
        files = []
        if match.files():
            changes = None
        else:
            changes = repo.status(match=match)[:3]
            modified, added, removed = changes
            files = modified + added + removed
            match = cmdutil.matchfiles(repo, files)
        diffopts = repo.attic.diffopts({'git': True, 'nodates': True})
        chunks = patch.diff(repo,
                            repo.dirstate.parents()[0],
                            match=match,
                            changes=changes,
                            opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        ac = record.parsepatch(fp)
        chunks = record.filterpatch(ui, ac)
        # and a not-intending-to apply subset of it
        rc = refilterpatch(ac, chunks)
        del fp

        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        newfiles = [f for f in files if f in contenders]

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        modified = dict.fromkeys(changes[0])
        backups = {}
        backupdir = repo.join('shelve-backups')

        try:
            bkfiles = [f for f in newfiles if f in modified]
            backups = makebackup(ui, repo, backupdir, bkfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(sp)
            doshelve = sp.tell()
            sp.seek(0)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                if backups:
                    hg.revert(repo,
                              repo.dirstate.parents()[0], backups.has_key)

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug(_('applying patch\n'))
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                if doshelve:
                    ui.debug(_("saving patch to %s\n") % (name))
                    s = repo.attic
                    f = s.opener(name, 'w')
                    f.write(sp.getvalue())
                    del f
                    s.currentpatch = name
                    s.persiststate()
                del sp
            except:
                try:
                    for realname, tmpname in backups.iteritems():
                        ui.debug(
                            _('restoring %r to %r\n') % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                except OSError:
                    pass

            return 0
        finally:
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug(
                        _('removing backup for %r : %r\n') %
                        (realname, tmpname))
                    os.unlink(tmpname)
                os.rmdir(backupdir)
            except OSError:
                pass
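makebackup is called above but never defined in these snippets; a minimal sketch of what it plausibly does, modeled on the inline backup loops in the record examples below (os, errno, tempfile and util are assumed to be imported as elsewhere in this code):

def makebackup(ui, repo, backupdir, files):
    # create the backup directory on first use
    try:
        os.mkdir(backupdir)
    except OSError, err:
        if err.errno != errno.EEXIST:
            raise
    backups = {}
    for f in files:
        # copy each file aside under a unique temporary name
        fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') + '.',
                                       dir=backupdir)
        os.close(fd)
        ui.debug('backup %r as %r\n' % (f, tmpname))
        util.copyfile(repo.wjoin(f), tmpname)
        backups[f] = tmpname
    return backups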
Example #4
0
    def shelvefunc(ui, repo, message, match, opts):
        changes = repo.status(match=match)[:5]
        modified, added, removed = changes[:3]
        files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        patch_diff = ''.join(patch.diff(repo, repo.dirstate.parents()[0],
                           match=match, changes=changes, opts=diffopts))
        
        fp = cStringIO.StringIO(patch_diff)
        ac = parsepatch(fp)
        fp.close()
        
        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        contenders = {}
        for h in chunks:
            try: contenders.update(dict.fromkeys(h.files()))
            except AttributeError: pass

        newfiles = [f for f in files if f in contenders]

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        modified = dict.fromkeys(changes[0])

        backups = {}  # ensure the cleanup in the finally block below is safe
        backupdir = repo.join('shelve-backups')

        try:
            bkfiles = [f for f in newfiles if f in modified]
            backups = makebackup(ui, repo, backupdir, bkfiles)
            
            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(sp)
            doshelve = sp.tell()
            sp.seek(0)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                if backups:
                    hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                if doshelve:
                    ui.debug("saving patch to shelve\n")
                    if opts['append']:
                        f = repo.opener(shelfpath, "a")
                    else:
                        f = repo.opener(shelfpath, "w")
                    f.write(sp.getvalue())
                    del f
                del sp
            except:
                try:
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                    ui.debug('removing shelve file\n')
                    os.unlink(repo.join(shelfpath))
                except OSError:
                    pass

            return 0
        finally:
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug('removing backup for %r : %r\n' % (realname, tmpname))
                    os.unlink(tmpname)
                os.rmdir(backupdir)
            except OSError:
                pass
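refilterpatch is likewise used but not shown; conceptually it is the complement of the interactive selection. A hedged sketch of that idea only (the upstream helper also has to keep file headers together with their unselected hunks):

def refilterpatch(allchunks, selected):
    # hypothetical sketch: return every parsed header/hunk the user chose
    # NOT to apply, so it can later be applied on top of the shelved files
    picked = set(id(c) for c in selected)
    return [c for c in allchunks if id(c) not in picked]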
Example #5
0
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall, *pats, **opts):
    if not ui.interactive():
        raise util.Abort(_('running non-interactively, use %s instead') %
                         cmdsuggest)

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        changes = repo.status(match=match)[:3]
        diffopts = mdiff.diffopts(git=True, nodates=True,
                                  ignorews=opts.get('ignore_all_space'),
                                  ignorewsamount=opts.get('ignore_space_change'),
                                  ignoreblanklines=opts.get('ignore_blank_lines'))
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        chunks = filterpatch(ui, parsepatch(fp))
        del fp

        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = changes[0] + changes[1] + changes[2]
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(changes[0])

        # 2. backup changed files, so we can restore them in the end
        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified]

        backups = {}
        if tobackup:
            backupdir = repo.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError, err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                shutil.copystat(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.p1(),
                          lambda key: key in backups)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError, err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call
            # a highlevel command with list of pathnames relative to
            # repo root
            cwd = os.getcwd()
            os.chdir(repo.root)
            try:
                commitfunc(ui, repo, *newfiles, **opts)
            finally:
                os.chdir(cwd)

            return 0
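Example #5 is truncated inside recordfunc; in the record extension the inner driver is ultimately handed to the generic commit machinery. A sketch of how dorecord plausibly ends (the restore/cleanup code in between is omitted):

    # hand the interactive driver to cmdutil.commit, which takes care of the
    # commit message, editor invocation and hooks
    return cmdutil.commit(ui, repo, recordfunc, pats, opts)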
Example #6
0
def dorecord(ui, repo, commitfunc, *pats, **opts):
    try:
        if not ui.interactive():
            raise util.Abort(_('running non-interactively, use commit instead'))
    except TypeError: # backwards compatibility with hg 1.1
        if not ui.interactive:
            raise util.Abort(_('running non-interactively, use commit instead'))

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and accordingly
        prepare the working dir into a state where the job can be delegated
        to a non-interactive commit command such as 'commit' or 'qrefresh'.

        After the actual job is done by the non-interactive command, the
        working dir state is restored to the original.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(_('cannot partially commit a merge '
                               '(use hg commit instead)'))

        # status gives back
        #   modified, added, removed, deleted, unknown, ignored, clean
        # we take only the first 3 of these
        changes = repo.status(match=match)[:3]
        modified, added, removed = changes
        diffopts = opts.copy()
        diffopts['nodates'] = True
        diffopts['git'] = True
        diffopts = patch.diffopts(ui, opts=diffopts)
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        chunks = crpatch.filterpatch(opts,
                                     crpatch.parsepatch(changes, fp),
                                     chunk_selector.chunkselector, ui)
        del fp

        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = changes[0] + changes[1] + changes[2]
        newfiles = [f for f in changed if f in contenders]

        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0


        # 2. backup changed files, so we can restore them in the end
        backups = {}
        newly_added_backups = {}
        backupdir = repo.join('record-backups')
        try:
            os.mkdir(backupdir)
        except OSError, err:
            if err.errno != errno.EEXIST:
                raise
        try:
            # backup continues
            for f in newfiles:
                if f not in (modified + added):
                    continue
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                if f in modified:
                    backups[f] = tmpname
                elif f in added:
                    newly_added_backups[f] = tmpname

            fp = cStringIO.StringIO()
            all_backups = {}
            all_backups.update(backups)
            all_backups.update(newly_added_backups)
            for c in chunks:
                if c.filename() in all_backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts['crecord_reviewpatch']:
                patchtext = fp.read()
                reviewedpatch = ui.edit(patchtext, "")
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.parents()[0],
                          lambda key: key in backups)
            # remove newly added files from 'clean' repo (so patch can apply)
            for f in newly_added_backups:
                os.unlink(repo.wjoin(f))

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    if hasattr(patch, 'workingbackend'): # detect 1.9
                        patch.internalpatch(ui, repo, fp, strip=1, eolmode=None)
                    else:
                        pfiles = {}
                        try:
                            patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                        except (TypeError, AttributeError): # pre 17cea10c343e
                            try:
                                patch.internalpatch(ui, repo, fp, 1, repo.root,
                                                    eolmode=None)
                            except (TypeError, AttributeError): # pre 00a881581400
                                try:
                                    patch.internalpatch(fp, ui, 1, repo.root,
                                                        files=pfiles, eolmode=None)
                                except TypeError: # backwards compatible with hg 1.1
                                    patch.internalpatch(fp, ui, 1,
                                                        repo.root, files=pfiles)
                        try:
                            cmdutil.updatedir(ui, repo, pfiles)
                        except AttributeError:
                            try:
                                patch.updatedir(ui, repo, pfiles)
                            except AttributeError:
                                # from 00a881581400 onwards
                                pass
                except patch.PatchError, err:
                    s = str(err)
                    if s:
                        raise util.Abort(s)
                    else:
                        raise util.Abort(_('patch failed to apply'))
            del fp

            # 4. We prepared working directory according to filtered patch.
            #    Now is the time to delegate the job to commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call a
            # highlevel command with list of pathnames relative to repo root
            newfiles = [repo.wjoin(n) for n in newfiles]
            commitfunc(ui, repo, *newfiles, **opts)

            return 0
Example #7
0
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                shutil.copystat(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.p1(),
                          lambda key: key in backups)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError, err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!
Example #8
0
    def shelvefunc(ui, repo, message, match, opts):
        # If an MQ patch is applied, consider all qdiff changes
        if hasattr(repo, 'mq') and repo.mq.applied and repo['.'] == repo['qtip']:
            qtip = repo['.']
            basenode = qtip.parents()[0].node()
        else:
            basenode = repo.dirstate.parents()[0]

        changes = repo.status(node1=basenode, match=match)[:5]
        modified, added, removed = changes[:3]
        files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        patch_diff = ''.join(patch.diff(repo, basenode, match=match,
                             changes=changes, opts=diffopts))

        fp = cStringIO.StringIO(patch_diff)
        ac = parsepatch(fp)
        fp.close()
        chunks = filterpatch(ui, ac)
        rc = refilterpatch(ac, chunks)

        contenders = {}
        for h in chunks:
            try: contenders.update(dict.fromkeys(h.files()))
            except AttributeError: pass

        newfiles = [f for f in files if f in contenders]

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        modified = dict.fromkeys(changes[0])

        backupdir = repo.join('shelve-backups')

        try:
            bkfiles = [f for f in newfiles if f in modified]
            backups = makebackup(ui, repo, backupdir, bkfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(sp)
            doshelve = sp.tell()
            sp.seek(0)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                if backups:
                    hg.revert(repo, basenode, backups.has_key)

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug(_('applying patch\n'))
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root, eolmode=None)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                if doshelve:
                    ui.debug(_('saving patch to shelve\n'))
                    if opts['append']:
                        f = repo.opener('shelve', "a")
                    else:
                        f = repo.opener('shelve', "w")
                    f.write(sp.getvalue())
                    del f
                del sp
            except:
                try:
                    for realname, tmpname in backups.iteritems():
                        ui.debug(_('restoring %r to %r\n') % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                    ui.debug(_('removing shelve file\n'))
                    os.unlink(repo.join('shelve'))
                except (IOError, OSError), e:
                    ui.warn(_('abort: backup restore failed, %s\n') % str(e))

            return 0
Example #9
0
def dorecord(ui, repo, commitfunc, *pats, **opts):
    if not ui.interactive():
        raise util.Abort(_('running non-interactively, use commit instead'))

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(
                _('cannot partially commit a merge '
                  '(use "hg commit" instead)'))

        changes = repo.status(match=match)[:3]
        diffopts = mdiff.diffopts(git=True, nodates=True)
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        chunks = filterpatch(ui, parsepatch(fp))
        del fp

        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = changes[0] + changes[1] + changes[2]
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(changes[0])

        # 2. backup changed files, so we can restore them in the end
        backups = {}
        backupdir = repo.join('record-backups')
        try:
            os.mkdir(backupdir)
        except OSError, err:
            if err.errno != errno.EEXIST:
                raise
        try:
            # backup continues
            for f in newfiles:
                if f not in modified:
                    continue
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') +
                                               '.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                shutil.copystat(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo,
                          repo.dirstate.parents()[0],
                          lambda key: key in backups)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    pfiles = {}
                    patch.internalpatch(fp,
                                        ui,
                                        1,
                                        repo.root,
                                        files=pfiles,
                                        eolmode=None)
                    cmdutil.updatedir(ui, repo, pfiles)
                except patch.PatchError, err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call
            # a highlevel command with list of pathnames relative to
            # repo root
            cwd = os.getcwd()
            os.chdir(repo.root)
            try:
                commitfunc(ui, repo, *newfiles, **opts)
            finally:
                os.chdir(cwd)

            return 0
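Each record variant above promises in its docstring to restore the working directory afterwards, but the snippets are cut off before that point. A sketch of that restore step, modeled on the backup and cleanup loops in the shelve examples:

        finally:
            try:
                # 5. finally restore backed-up files and drop the backup dir
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))
                    util.copyfile(tmpname, repo.wjoin(realname))
                    os.unlink(tmpname)
                os.rmdir(backupdir)
            except OSError:
                pass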
Example #10
0
    def shelvefunc(ui, repo, message, match, opts):
        files = []
        if match.files():
            changes = None
        else:
            changes = repo.status(match=match)[:3]
            modified, added, removed = changes
            files = modified + added + removed
            match = cmdutil.matchfiles(repo, files)
        diffopts = repo.attic.diffopts({'git': True, 'nodates': True})
        chunks = patch.diff(repo, repo.dirstate.parents()[0], match=match,
                            changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        ac = record.parsepatch(fp)
        chunks = record.filterpatch(ui, ac)
        # and a not-intending-to apply subset of it
        rc = refilterpatch(ac, chunks)
        del fp

        contenders = {}
        for h in chunks:
            try: contenders.update(dict.fromkeys(h.files()))
            except AttributeError: pass

        newfiles = [f for f in files if f in contenders]

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        modified = dict.fromkeys(changes[0])
        backups = {}
        backupdir = repo.join('shelve-backups')

        try:
            bkfiles = [f for f in newfiles if f in modified]
            backups = makebackup(ui, repo, backupdir, bkfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(sp)
            doshelve = sp.tell()
            sp.seek(0)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                if backups:
                    hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug(_('applying patch\n'))
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                if doshelve:
                    ui.debug(_("saving patch to %s\n") % (name))
                    s = repo.attic
                    f = s.opener(name, 'w')
                    f.write(sp.getvalue())
                    del f
                    s.currentpatch = name
                    s.persiststate()
                del sp
            except:
                try:
                    for realname, tmpname in backups.iteritems():
                        ui.debug(_('restoring %r to %r\n') % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                except OSError:
                    pass

            return 0
        finally:
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug(_('removing backup for %r : %r\n') % (realname, tmpname))
                    os.unlink(tmpname)
                os.rmdir(backupdir)
            except OSError:
                pass
Example #11
0
def dorecord(ui, repo, committer, *pats, **opts):
    if not ui.interactive:
        raise util.Abort(_('running non-interactively, use commit instead'))

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and accordingly
        prepare the working dir into a state where the job can be delegated
        to a non-interactive commit command such as 'commit' or 'qrefresh'.

        After the actual job is done by the non-interactive command, the
        working dir state is restored to the original.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """
        if match.files():
            changes = None
        else:
            changes = repo.status(match=match)[:3]
            modified, added, removed = changes
            match = cmdutil.matchfiles(repo, modified + added + removed)
        diffopts = mdiff.diffopts(git=True, nodates=True)
        chunks = patch.diff(repo,
                            repo.dirstate.parents()[0],
                            match=match,
                            changes=changes,
                            opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        if changes is not None:
            chunks = filterpatch(opts, parsepatch(changes, fp), chunkselector)
        else:
            chgs = repo.status(match=match)[:3]
            chunks = filterpatch(opts, parsepatch(chgs, fp), chunkselector)

        del fp

        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        newfiles = [f for f in match.files() if f in contenders]

        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        if changes is None:
            match = cmdutil.matchfiles(repo, newfiles)
            changes = repo.status(match=match)
        modified = dict.fromkeys(changes[0])

        # 2. backup changed files, so we can restore them in the end
        backups = {}
        backupdir = repo.join('record-backups')
        try:
            os.mkdir(backupdir)
        except OSError, err:
            if err.errno != errno.EEXIST:
                raise
        try:
            # backup continues
            for f in newfiles:
                if f not in modified:
                    continue
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') +
                                               '.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug(_('backup %r as %r\n') % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug(_('applying patch\n'))
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root)
                except patch.PatchError, err:
                    s = str(err)
                    if s:
                        raise util.Abort(s)
                    else:
                        raise util.Abort(_('patch failed to apply'))
            del fp

            # 4. We prepared working directory according to filtered patch.
            #    Now is the time to delegate the job to commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call a
            # highlevel command with list of pathnames relative to repo root
            cwd = os.getcwd()
            os.chdir(repo.root)
            try:
                committer(ui, repo, newfiles, opts)
            finally:
                os.chdir(cwd)

            return 0
Example #12
0
    def shelvefunc(ui, repo, message, match, opts):
        changes = repo.status(match=match)[:5]
        modified, added, removed = changes[:3]
        files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        patch_diff = ''.join(
            patch.diff(repo,
                       repo.dirstate.parents()[0],
                       match=match,
                       changes=changes,
                       opts=diffopts))

        fp = cStringIO.StringIO(patch_diff)
        ac = parsepatch(fp)
        fp.close()

        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        newfiles = [f for f in files if f in contenders]

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        modified = dict.fromkeys(changes[0])

        backups = {}  # ensure the cleanup in the finally block below is safe
        backupdir = repo.join('shelve-backups')

        try:
            bkfiles = [f for f in newfiles if f in modified]
            backups = makebackup(ui, repo, backupdir, bkfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(sp)
            doshelve = sp.tell()
            sp.seek(0)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                if backups:
                    hg.revert(repo,
                              repo.dirstate.parents()[0], backups.has_key)

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                if doshelve:
                    ui.debug("saving patch to shelve\n")
                    if opts['append']:
                        f = repo.opener(shelfpath, "a")
                    else:
                        f = repo.opener(shelfpath, "w")
                    f.write(sp.getvalue())
                    del f
                del sp
            except:
                try:
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                    ui.debug('removing shelve file\n')
                    os.unlink(repo.join(shelfpath))
                except OSError:
                    pass

            return 0
        finally:
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug('removing backup for %r : %r\n' %
                             (realname, tmpname))
                    os.unlink(tmpname)
                os.rmdir(backupdir)
            except OSError:
                pass
Example #13
0
def dorecord(ui, repo, committer, *pats, **opts):
    if not ui.interactive:
        raise util.Abort(_('running non-interactively, use commit instead'))

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and accordingly
        prepare the working dir into a state where the job can be delegated
        to a non-interactive commit command such as 'commit' or 'qrefresh'.

        After the actual job is done by the non-interactive command, the
        working dir state is restored to the original.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """
        if match.files():
            changes = None
        else:
            changes = repo.status(match=match)[:3]
            modified, added, removed = changes
            match = cmdutil.matchfiles(repo, modified + added + removed)
        diffopts = mdiff.diffopts(git=True, nodates=True)
        chunks = patch.diff(repo, repo.dirstate.parents()[0], match=match,
                            changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        if changes is not None:
            chunks = filterpatch(opts, parsepatch(changes, fp), chunkselector)
        else:
            chgs = repo.status(match=match)[:3]
            chunks = filterpatch(opts, parsepatch(chgs, fp), chunkselector)
            
        del fp

        contenders = {}
        for h in chunks:
            try: contenders.update(dict.fromkeys(h.files()))
            except AttributeError: pass

        newfiles = [f for f in match.files() if f in contenders]

        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        if changes is None:
            match = cmdutil.matchfiles(repo, newfiles)
            changes = repo.status(match=match)
        modified = dict.fromkeys(changes[0])

        # 2. backup changed files, so we can restore them in the end
        backups = {}
        backupdir = repo.join('record-backups')
        try:
            os.mkdir(backupdir)
        except OSError, err:
            if err.errno != errno.EEXIST:
                raise
        try:
            # backup continues
            for f in newfiles:
                if f not in modified:
                    continue
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug(_('backup %r as %r\n') % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug(_('applying patch\n'))
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root)
                except patch.PatchError, err:
                    s = str(err)
                    if s:
                        raise util.Abort(s)
                    else:
                        raise util.Abort(_('patch failed to apply'))
            del fp

            # 4. We prepared working directory according to filtered patch.
            #    Now is the time to delegate the job to commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call a
            # highlevel command with list of pathnames relative to repo root
            cwd = os.getcwd()
            os.chdir(repo.root)
            try:
                committer(ui, repo, newfiles, opts)
            finally:
                os.chdir(cwd)

            return 0
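The older drivers in Examples #11 and #13 call committer(ui, repo, newfiles, opts) instead of unpacking the arguments; a hedged example of the kind of wrapper they expect, based on how the record extension forwards to the plain commit command (commands here means mercurial.commands):

def committer(ui, repo, files, opts):
    # thin wrapper: pass the selected files on to the ordinary commit command
    commands.commit(ui, repo, *files, **opts)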