Example #1
    def runPatcher(self, fp, wfile, updatestate):
        ui = self.repo.ui.copy()
        class warncapt(ui.__class__):
            def warn(self, msg, *args, **opts):
                self.write(msg)
        ui.__class__ = warncapt

        ok = True
        repo = self.repo
        ui.pushbuffer()
        try:
            eolmode = ui.config('patch', 'eol', 'strict')
            if eolmode.lower() not in patch.eolmodes:
                eolmode = 'strict'
            else:
                eolmode = eolmode.lower()
            try:
                # hg-1.9
                ret = patch.internalpatch(ui, repo, fp, 1, files=None,
                                          eolmode=eolmode, similarity=0)
            except TypeError:
                # hg-1.8
                pfiles = {}
                ret = patch.internalpatch(fp, ui, 1, repo.root, pfiles,
                                          eolmode=eolmode)
                if updatestate:
                    cmdutil.updatedir(repo.ui, repo, pfiles)
            if ret < 0:
                ok = False
                self.showMessage.emit(_('Patch failed to apply'))
        except (patch.PatchError, EnvironmentError), err:
            ok = False
            self.showMessage.emit(hglib.tounicode(str(err)))
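
Note: the example above probes internalpatch at call time because hg 1.9 changed its signature to (ui, repo, patchobj, strip, ...) from the hg 1.8 form (patchobj, ui, strip, cwd, files, ...), and on 1.8 the caller updates the dirstate itself. The same fallback can be factored into a small standalone helper; this is only a sketch, and the name applypatch_compat and its defaults are illustrative rather than part of the example:

from mercurial import patch, cmdutil

def applypatch_compat(ui, repo, fp, strip=1, eolmode='strict'):
    # Try the hg >= 1.9 calling convention first, then fall back to the
    # hg 1.8 one, mirroring the try/except TypeError probe used above.
    try:
        return patch.internalpatch(ui, repo, fp, strip, files=None,
                                   eolmode=eolmode, similarity=0)
    except TypeError:
        pfiles = {}
        ret = patch.internalpatch(fp, ui, strip, repo.root, pfiles,
                                  eolmode=eolmode)
        cmdutil.updatedir(ui, repo, pfiles)
        return ret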
Example #2
def unshelve(ui, repo, **opts):
    '''restore shelved changes'''

    # Shelf name and path
    shelfname = opts.get('name')
    shelfpath = getshelfpath(repo, shelfname)

    # List all the active shelves by name and return
    if opts['list']:
        listshelves(ui, repo)
        return
        
    try:
        patch_diff = repo.opener(shelfpath).read()
        fp = cStringIO.StringIO(patch_diff)
        if opts['inspect']:
            ui.status(fp.getvalue())
        else:
            files = []
            ac = parsepatch(fp)
            for chunk in ac:
                if isinstance(chunk, header):
                    files += chunk.files()
            backupdir = repo.join('shelve-backups')
            backups = makebackup(ui, repo, backupdir, set(files))

            ui.debug('applying shelved patch\n')
            patchdone = 0
            try:
                try:
                    fp.seek(0)
                    patch.internalpatch(ui, repo, fp, 1)
                    patchdone = 1
                except:
                    if opts['force']:
                        patchdone = 1
                    else:
                        ui.status('restoring backup files\n')
                        for realname, tmpname in backups.iteritems():
                            ui.debug('restoring %r to %r\n' % 
                                     (tmpname, realname))
                            util.copyfile(tmpname, repo.wjoin(realname))
            finally:
                try:
                    ui.debug('removing backup files\n')
                    shutil.rmtree(backupdir, True)
                except OSError:
                    pass

            if patchdone:
                ui.debug("removing shelved patches\n")
                os.unlink(repo.join(shelfpath))
                ui.status("unshelve completed\n")
    except IOError:
        ui.warn('nothing to unshelve\n')
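
makebackup is defined elsewhere in the shelve extension and is not shown in these excerpts. Judging from how its result is used above (a realname-to-tmpname mapping that is restored with util.copyfile and then removed), a plausible minimal version might look like the sketch below; it is an assumption, not the extension's actual code:

import errno
import os
import tempfile
from mercurial import util

def makebackup_sketch(ui, repo, backupdir, files):
    # Copy each working-copy file into backupdir and return a
    # {realname: tmpname} map, matching how the callers above use it.
    try:
        os.mkdir(backupdir)
    except OSError, err:
        if err.errno != errno.EEXIST:
            raise
    backups = {}
    for f in files:
        fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') + '.',
                                       dir=backupdir)
        os.close(fd)
        ui.debug('backup %r as %r\n' % (f, tmpname))
        util.copyfile(repo.wjoin(f), tmpname)
        backups[f] = tmpname
    return backups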
Example #3
    def runPatcher(self, fp, wfile, updatestate):
        ui = self.repo.ui.copy()

        class warncapt(ui.__class__):
            def warn(self, msg, *args, **opts):
                self.write(msg)

        ui.__class__ = warncapt

        ok = True
        repo = self.repo
        ui.pushbuffer()
        try:
            eolmode = ui.config('patch', 'eol', 'strict')
            if eolmode.lower() not in patch.eolmodes:
                eolmode = 'strict'
            else:
                eolmode = eolmode.lower()
            # 'updatestate' flag has no effect since hg 1.9
            try:
                ret = patch.internalpatch(ui,
                                          repo,
                                          fp,
                                          1,
                                          files=None,
                                          eolmode=eolmode,
                                          similarity=0)
            except ValueError:
                ret = -1
            if ret < 0:
                ok = False
                self.showMessage.emit(_('Patch failed to apply'))
        except (patch.PatchError, EnvironmentError), err:
            ok = False
            self.showMessage.emit(hglib.tounicode(str(err)))
Example #4
    def runPatcher(self, fp, wfile, updatestate):
        ui = self.repo.ui.copy()
        class warncapt(ui.__class__):
            def warn(self, msg, *args, **opts):
                self.write(msg)
        ui.__class__ = warncapt

        ok = True
        repo = self.repo
        ui.pushbuffer()
        try:
            eolmode = ui.config('patch', 'eol', 'strict')
            if eolmode.lower() not in patch.eolmodes:
                eolmode = 'strict'
            else:
                eolmode = eolmode.lower()
            # 'updatestate' flag has no effect since hg 1.9
            try:
                ret = patch.internalpatch(ui, repo, fp, 1, files=None,
                                          eolmode=eolmode, similarity=0)
            except ValueError:
                ret = -1
            if ret < 0:
                ok = False
                self.showMessage.emit(_('Patch failed to apply'))
        except (patch.PatchError, EnvironmentError), err:
            ok = False
            self.showMessage.emit(hglib.tounicode(str(err)))
Example #5
def unstash(ui, repo, task):
    '''Unstashes a working copy.  Returns True if a stash was found and applied,
    False if no stash exists.'''
    if not hasstash(repo, task):
        return False

    ui.write('unstashing task: %s\n' % task)
    ui.debug('unstashing %s from stash file: %s\n' %
             (task, stashfiles(repo, task)[0]))

    patchfile = repo.join(stashfiles(repo, task)[0])
    dirstatefile = repo.join(stashfiles(repo, task)[1])

    files = {}
    if os.path.exists(patchfile):
        try:
            fuzz = patch.internalpatch(patchfile,
                                       ui,
                                       strip=1,
                                       cwd=repo.root,
                                       files=files)
        except Exception, inst:
            ui.note(str(inst) + '\n')
            ui.warn('patch failed, unable to continue\n')
            ui.warn('see %s for stash patch\n' % patchfile)
            return False

        if files:
            patch.updatedir(ui, repo, files)
Example #6
def unstash(ui, repo, task):
    '''Unstashes a working copy.  Returns True if a stash was found and applied,
    False if no stash exists.'''
    if not hasstash(repo, task):
        return False

    ui.write('unstashing task: %s\n' % task)
    ui.debug('unstashing %s from stash file: %s\n'
            % (task, stashfiles(repo, task)[0]))

    patchfile = repo.join(stashfiles(repo, task)[0])
    dirstatefile = repo.join(stashfiles(repo, task)[1])

    files = {}
    if os.path.exists(patchfile):
        try:
            fuzz = patch.internalpatch(patchfile, ui, strip=1,
                                       cwd=repo.root, files=files)
        except Exception, inst:
            ui.note(str(inst) + '\n')
            ui.warn('patch failed, unable to continue\n')
            ui.warn('see %s for stash patch\n' % patchfile)
            return False

        if files:
            patch.updatedir(ui, repo, files)
Example #7
def dorecord(ui, repo, commitfunc, *pats, **opts):
    try:
        if not ui.interactive():
            raise util.Abort(_('running non-interactively, use commit instead'))
    except TypeError: # backwards compatibility with hg 1.1
        if not ui.interactive:
            raise util.Abort(_('running non-interactively, use commit instead'))

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and accordingly
        prepare working dir into a state, where the job can be delegated to
        non-interactive commit command such as 'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, working dir
        state is restored to original.

        In the end we'll record interesting changes, and everything else will be
        left in place, so the user can continue his work.
        """

        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(_('cannot partially commit a merge '
                               '(use hg commit instead)'))

        # status gives back
        #   modified, added, removed, deleted, unknown, ignored, clean
        # we take only the first 3 of these
        changes = repo.status(match=match)[:3]
        modified, added, removed = changes
        diffopts = opts.copy()
        diffopts['nodates'] = True
        diffopts['git'] = True
        diffopts = patch.diffopts(ui, opts=diffopts)
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        chunks = crpatch.filterpatch(opts,
                                     crpatch.parsepatch(changes, fp),
                                     chunk_selector.chunkselector, ui)
        del fp

        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = changes[0] + changes[1] + changes[2]
        newfiles = [f for f in changed if f in contenders]

        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0


        # 2. backup changed files, so we can restore them in the end
        backups = {}
        newly_added_backups = {}
        backupdir = repo.join('record-backups')
        try:
            os.mkdir(backupdir)
        except OSError, err:
            if err.errno != errno.EEXIST:
                raise
        try:
            # backup continues
            for f in newfiles:
                if f not in (modified + added):
                    continue
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                if f in modified:
                    backups[f] = tmpname
                elif f in added:
                    newly_added_backups[f] = tmpname

            fp = cStringIO.StringIO()
            all_backups = {}
            all_backups.update(backups)
            all_backups.update(newly_added_backups)
            for c in chunks:
                if c.filename() in all_backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts['crecord_reviewpatch']:
                patchtext = fp.read()
                reviewedpatch = ui.edit(patchtext, "")
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.parents()[0],
                          lambda key: key in backups)
            # remove newly added files from 'clean' repo (so patch can apply)
            for f in newly_added_backups:
                os.unlink(f)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    if hasattr(patch, 'workingbackend'): # detect 1.9
                        patch.internalpatch(ui, repo, fp, strip=1, eolmode=None)
                    else:
                        pfiles = {}
                        try:
                            patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                        except (TypeError, AttributeError): # pre 17cea10c343e
                            try:
                                patch.internalpatch(ui, repo, fp, 1, repo.root,
                                                    eolmode=None)
                            except (TypeError, AttributeError): # pre 00a881581400
                                try:
                                    patch.internalpatch(fp, ui, 1, repo.root,
                                                        files=pfiles, eolmode=None)
                                except TypeError: # backwards compatible with hg 1.1
                                    patch.internalpatch(fp, ui, 1,
                                                        repo.root, files=pfiles)
                        try:
                            cmdutil.updatedir(ui, repo, pfiles)
                        except AttributeError:
                            try:
                                patch.updatedir(ui, repo, pfiles)
                            except AttributeError:
                                # from 00a881581400 onwards
                                pass
                except patch.PatchError, err:
                    s = str(err)
                    if s:
                        raise util.Abort(s)
                    else:
                        raise util.Abort(_('patch failed to apply'))
            del fp

            # 4. We prepared working directory according to filtered patch.
            #    Now is the time to delegate the job to commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call a
            # highlevel command with list of pathnames relative to repo root
            newfiles = [repo.wjoin(n) for n in newfiles]
            commitfunc(ui, repo, *newfiles, **opts)

            return 0
Example #8
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.p1(),
                          lambda key: key in backups)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError, err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call
            # a highlevel command with list of pathnames relative to
            # repo root
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            commitfunc(ui, repo, *newfiles, **opts)

            return 0
Example #9
def unshelve(ui, repo, **opts):
    '''restore shelved changes'''

    # Shelf name and path
    shelfname = opts.get('name')
    shelfpath = getshelfpath(repo, shelfname)

    # List all the active shelves by name and return
    if opts['list']:
        listshelves(ui, repo)
        return

    try:
        patch_diff = repo.opener(shelfpath).read()
        fp = cStringIO.StringIO(patch_diff)
        if opts['inspect']:
            # wrap ui.write so diff output can be labeled/colorized
            def wrapwrite(orig, *args, **kw):
                label = kw.pop('label', '')
                if label:
                    label += ' '
                for chunk, l in patch.difflabel(lambda: args):
                    orig(chunk, label=label + l)

            oldwrite = ui.write
            extensions.wrapfunction(ui, 'write', wrapwrite)
            try:
                ui.status(fp.getvalue())
            finally:
                ui.write = oldwrite
        else:
            files = []
            ac = parsepatch(fp)
            for chunk in ac:
                if isinstance(chunk, header):
                    files += chunk.files()
            backupdir = repo.join('shelve-backups')
            backups = makebackup(ui, repo, backupdir, set(files))

            ui.debug('applying shelved patch\n')
            patchdone = 0
            try:
                try:
                    fp.seek(0)
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                    patchdone = 1
                except:
                    if opts['force']:
                        patchdone = 1
                    else:
                        ui.status('restoring backup files\n')
                        for realname, tmpname in backups.iteritems():
                            ui.debug('restoring %r to %r\n' %
                                     (tmpname, realname))
                            util.copyfile(tmpname, repo.wjoin(realname))
            finally:
                try:
                    ui.debug('removing backup files\n')
                    shutil.rmtree(backupdir, True)
                except OSError:
                    pass

            if patchdone:
                ui.debug("removing shelved patches\n")
                os.unlink(repo.join(shelfpath))
                ui.status("unshelve completed\n")
    except IOError:
        ui.warn('nothing to unshelve\n')
Example #10
    def shelvefunc(ui, repo, message, match, opts):
        files = []
        if match.files():
            changes = None
        else:
            changes = repo.status(match=match)[:3]
            modified, added, removed = changes
            files = modified + added + removed
            match = cmdutil.matchfiles(repo, files)
        diffopts = repo.attic.diffopts({'git': True, 'nodates': True})
        chunks = patch.diff(repo,
                            repo.dirstate.parents()[0],
                            match=match,
                            changes=changes,
                            opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        ac = record.parsepatch(fp)
        chunks = record.filterpatch(ui, ac)
        # and a not-intending-to apply subset of it
        rc = refilterpatch(ac, chunks)
        del fp

        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        newfiles = [f for f in files if f in contenders]

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        modified = dict.fromkeys(changes[0])
        backups = {}
        backupdir = repo.join('shelve-backups')

        try:
            bkfiles = [f for f in newfiles if f in modified]
            backups = makebackup(ui, repo, backupdir, bkfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(sp)
            doshelve = sp.tell()
            sp.seek(0)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                if backups:
                    hg.revert(repo,
                              repo.dirstate.parents()[0], backups.has_key)

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug(_('applying patch\n'))
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                if doshelve:
                    ui.debug(_("saving patch to %s\n") % (name))
                    s = repo.attic
                    f = s.opener(name, 'w')
                    f.write(sp.getvalue())
                    del f
                    s.currentpatch = name
                    s.persiststate()
                del sp
            except:
                try:
                    for realname, tmpname in backups.iteritems():
                        ui.debug(
                            _('restoring %r to %r\n') % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                except OSError:
                    pass

            return 0
        finally:
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug(
                        _('removing backup for %r : %r\n') %
                        (realname, tmpname))
                    os.unlink(tmpname)
                os.rmdir(backupdir)
            except OSError:
                pass
Example #11
    def shelvefunc(ui, repo, message, match, opts):
        parents = repo.dirstate.parents()
        changes = repo.status(match=match)[:5]
        modified, added, removed = changes[:3]
        files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        patch_diff = "".join(patch.diff(repo, parents[0], match=match, changes=changes, opts=diffopts))

        fp = cStringIO.StringIO(patch_diff)
        ac = parsepatch(fp)
        fp.close()

        chunks = filterpatch(ui, ac, not opts["all"])
        rc = refilterpatch(ac, chunks)

        # set of files to be processed
        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        # exclude sources of copies that are otherwise untouched
        newfiles = set(f for f in files if f in contenders)

        if not newfiles:
            ui.status(_("no changes to shelve\n"))
            return 0

        backupdir = repo.join("shelve-backups")

        try:
            backups = makebackup(ui, repo, backupdir, newfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                c.write(sp)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                # skip files not selected for shelving
                if c.filename() in newfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                opts["no_backup"] = True
                cmdutil.revert(ui, repo, repo["."], parents, *[os.path.join(repo.root, f) for f in newfiles], **opts)
                for f in added:
                    if f in newfiles:
                        util.unlinkpath(repo.wjoin(f))

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug("applying patch\n")
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                ui.debug("saving patch to shelve\n")
                if opts["append"]:
                    sp.write(repo.opener(shelfpath).read())
                sp.seek(0)
                f = repo.opener(shelfpath, "w")
                f.write(sp.getvalue())
                del f, sp
            except:
                ui.warn("shelving failed: %s\n" % sys.exc_info()[1])
                try:
                    # re-schedule remove
                    matchremoved = scmutil.matchfiles(repo, removed)
                    cmdutil.forget(ui, repo, matchremoved, "", True)
                    for f in removed:
                        if f in newfiles and os.path.isfile(f):
                            os.unlink(f)
                    # copy back backups
                    for realname, tmpname in backups.iteritems():
                        ui.debug("restoring %r to %r\n" % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                    # re-schedule add
                    matchadded = scmutil.matchfiles(repo, added)
                    cmdutil.add(ui, repo, matchadded, False, False, "", True)

                    ui.debug("removing shelve file\n")
                    if os.path.isfile(repo.wjoin(shelfpath)):
                        os.unlink(repo.join(shelfpath))
                except OSError, err:
                    ui.warn("restoring backup failed: %s\n" % err)

            return 0
Example #12
    def shelvefunc(ui, repo, message, match, opts):
        changes = repo.status(match=match)[:5]
        modified, added, removed = changes[:3]
        files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        patch_diff = ''.join(
            patch.diff(repo,
                       repo.dirstate.parents()[0],
                       match=match,
                       changes=changes,
                       opts=diffopts))

        fp = cStringIO.StringIO(patch_diff)
        ac = parsepatch(fp)
        fp.close()

        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        newfiles = [f for f in files if f in contenders]

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        modified = dict.fromkeys(changes[0])

        backupdir = repo.join('shelve-backups')

        try:
            bkfiles = [f for f in newfiles if f in modified]
            backups = makebackup(ui, repo, backupdir, bkfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(sp)
            doshelve = sp.tell()
            sp.seek(0)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                if backups:
                    hg.revert(repo,
                              repo.dirstate.parents()[0], backups.has_key)

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                if doshelve:
                    ui.debug("saving patch to shelve\n")
                    if opts['append']:
                        f = repo.opener(shelfpath, "a")
                    else:
                        f = repo.opener(shelfpath, "w")
                    f.write(sp.getvalue())
                    del f
                del sp
            except:
                try:
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                    ui.debug('removing shelve file\n')
                    os.unlink(repo.join(shelfpath))
                except OSError:
                    pass

            return 0
        finally:
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug('removing backup for %r : %r\n' %
                             (realname, tmpname))
                    os.unlink(tmpname)
                os.rmdir(backupdir)
            except OSError:
                pass
Example #13
    def shelvefunc(ui, repo, message, match, opts):
        parents = repo.dirstate.parents()
        changes = repo.status(match=match)[:5]
        modified, added, removed = changes[:3]
        files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        patch_diff = ''.join(
            patch.diff(repo,
                       parents[0],
                       match=match,
                       changes=changes,
                       opts=diffopts))

        fp = cStringIO.StringIO(patch_diff)
        ac = parsepatch(fp)
        fp.close()

        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        # set of files to be processed
        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        # exclude sources of copies that are otherwise untouched
        newfiles = set(f for f in files if f in contenders)

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        backupdir = repo.join('shelve-backups')

        try:
            backups = makebackup(ui, repo, backupdir, newfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                c.write(sp)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                # skip files not selected for shelving
                if c.filename() in newfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                opts['no_backup'] = True
                cmdutil.revert(ui, repo, repo['.'], parents,
                               *[os.path.join(repo.root, f) for f in newfiles],
                               **opts)
                for f in added:
                    if f in newfiles:
                        util.unlinkpath(repo.wjoin(f))

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                ui.debug("saving patch to shelve\n")
                if opts['append']:
                    sp.write(repo.opener(shelfpath).read())
                sp.seek(0)
                f = repo.opener(shelfpath, "w")
                f.write(sp.getvalue())
                del f, sp
            except:
                ui.warn("shelving failed: %s\n" % sys.exc_info()[1])
                try:
                    # re-schedule remove
                    matchremoved = scmutil.matchfiles(repo, removed)
                    cmdutil.forget(ui, repo, matchremoved, "", True)
                    for f in removed:
                        if f in newfiles and os.path.isfile(f):
                            os.unlink(f)
                    # copy back backups
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                    # re-schedule add
                    matchadded = scmutil.matchfiles(repo, added)
                    cmdutil.add(ui, repo, matchadded, False, False, "", True)

                    ui.debug('removing shelve file\n')
                    if os.path.isfile(repo.wjoin(shelfpath)):
                        os.unlink(repo.join(shelfpath))
                except OSError, err:
                    ui.warn("restoring backup failed: %s\n" % err)

            return 0
Example #14
def dorecord(ui, repo, committer, *pats, **opts):
    if not ui.interactive:
        raise util.Abort(_('running non-interactively, use commit instead'))

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and accordingly
        prepare working dir into a state, where the job can be delegated to
        non-interactive commit command such as 'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, working dir
        state is restored to original.

        In the end we'll record interesting changes, and everything else will be
        left in place, so the user can continue his work.
        """
        if match.files():
            changes = None
        else:
            changes = repo.status(match=match)[:3]
            modified, added, removed = changes
            match = cmdutil.matchfiles(repo, modified + added + removed)
        diffopts = mdiff.diffopts(git=True, nodates=True)
        chunks = patch.diff(repo, repo.dirstate.parents()[0], match=match,
                            changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        if changes is not None:
            chunks = filterpatch(opts, parsepatch(changes, fp), chunkselector)
        else:
            chgs = repo.status(match=match)[:3]
            chunks = filterpatch(opts, parsepatch(chgs, fp), chunkselector)
            
        del fp

        contenders = {}
        for h in chunks:
            try: contenders.update(dict.fromkeys(h.files()))
            except AttributeError: pass

        newfiles = [f for f in match.files() if f in contenders]

        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        if changes is None:
            match = cmdutil.matchfiles(repo, newfiles)
            changes = repo.status(match=match)
        modified = dict.fromkeys(changes[0])

        # 2. backup changed files, so we can restore them in the end
        backups = {}
        backupdir = repo.join('record-backups')
        try:
            os.mkdir(backupdir)
        except OSError, err:
            if err.errno != errno.EEXIST:
                raise
        try:
            # backup continues
            for f in newfiles:
                if f not in modified:
                    continue
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug(_('backup %r as %r\n') % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug(_('applying patch\n'))
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root)
                except patch.PatchError, err:
                    s = str(err)
                    if s:
                        raise util.Abort(s)
                    else:
                        raise util.Abort(_('patch failed to apply'))
            del fp

            # 4. We prepared working directory according to filtered patch.
            #    Now is the time to delegate the job to commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call a
            # highlevel command with list of pathnames relative to repo root
            cwd = os.getcwd()
            os.chdir(repo.root)
            try:
                committer(ui, repo, newfiles, opts)
            finally:
                os.chdir(cwd)

            return 0
Example #15
def shelve(ui, repo, *pats, **opts):
    '''interactively select changes to set aside

    If a list of files is omitted, all changes reported by :hg:`status`
    will be candidates for shelving.

    You will be prompted for whether to shelve changes to each
    modified file, and for files with multiple changes, for each
    change to use.

    The shelve command works with the Color extension to display
    diffs in color.

    On each prompt, the following responses are possible::

      y - shelve this change
      n - skip this change

      s - skip remaining changes to this file
      f - shelve remaining changes to this file

      d - done, skip remaining changes and files
      a - shelve all changes to all remaining files
      q - quit, shelving no changes

      ? - display help
    '''

    if not ui.interactive() and not (opts['all'] or opts['list']):
        raise util.Abort(_('shelve can only be run interactively'))

    # List all the active shelves by name and return
    if opts['list']:
        listshelves(ui, repo)
        return

    forced = opts['force'] or opts['append']

    # Shelf name and path
    shelfname = opts.get('name')
    shelfpath = getshelfpath(repo, shelfname)

    if os.path.exists(repo.join(shelfpath)) and not forced:
        raise util.Abort(_('shelve data already exists'))

    def shelvefunc(ui, repo, message, match, opts):
        parents = repo.dirstate.parents()
        changes = repo.status(match=match)[:3]
        modified, added, removed = changes
        diffopts = patch.diffopts(ui, opts={'git': True, 'nodates': True})
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO(''.join(chunks))

        try:
            ac = parsepatch(fp)
        except patch.PatchError, err:
            raise util.Abort(_('error parsing patch: %s') % err)

        del fp

        # 1. filter patch, so we have intending-to apply subset of it
        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        # set of files to be processed
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        # exclude sources of copies that are otherwise untouched
        changed = modified + added + removed
        newfiles = set(f for f in changed if f in contenders)
        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        # 2. backup changed files, so we can restore them in case of error
        backupdir = repo.join('shelve-backups')
        try:
            backups = makebackup(ui, repo, backupdir, newfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                c.write(sp)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                # skip files not selected for shelving
                if c.filename() in newfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo  (clean)
                opts['no_backup'] = True
                cmdutil.revert(ui, repo, repo['.'], parents,
                               *[repo.wjoin(f) for f in newfiles], **opts)
                for f in added:
                    if f in newfiles:
                        util.unlinkpath(repo.wjoin(f))

                # 3b. (apply)
                if dopatch:
                    try:
                        ui.debug('applying patch\n')
                        ui.debug(fp.getvalue())
                        patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                    except patch.PatchError, err:
                        raise util.Abort(str(err))
                del fp

                # 4. We prepared working directory according to filtered
                #    patch. Now is the time to save the shelved changes!
                ui.debug("saving patch to shelve\n")
                if opts['append']:
                    sp.write(repo.opener(shelfpath).read())
                sp.seek(0)
                f = repo.opener(shelfpath, "w")
                f.write(sp.getvalue())
                del f, sp
            except:
                ui.warn("shelving failed: %s\n" % sys.exc_info()[1])
                try:
                    # re-schedule remove
                    matchremoved = scmutil.matchfiles(repo, removed)
                    cmdutil.forget(ui, repo, matchremoved, "", True)
                    for f in removed:
                        if f in newfiles and os.path.isfile(repo.wjoin(f)):
                            os.unlink(repo.wjoin(f))
                    # copy back backups
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                        # Our calls to copystat() here and above are a
                        # hack to trick any editors that have f open into
                        # thinking we haven't modified them.
                        #
                        # Also note that this is racy, as an editor could
                        # notice the file's mtime before we've finished
                        # writing it.
                        shutil.copystat(tmpname, repo.wjoin(realname))
                    # re-schedule add
                    matchadded = scmutil.matchfiles(repo, added)
                    cmdutil.add(ui, repo, matchadded, False, False, "", True)

                    ui.debug('removing shelve file\n')
                    if os.path.isfile(repo.join(shelfpath)):
                        os.unlink(repo.join(shelfpath))
                except OSError, err:
                    ui.warn("restoring backup failed: %s\n" % err)
Example #16
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall, *pats, **opts):
    if not ui.interactive():
        raise util.Abort(_('running non-interactively, use %s instead') %
                         cmdsuggest)

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        changes = repo.status(match=match)[:3]
        diffopts = mdiff.diffopts(git=True, nodates=True,
                                  ignorews=opts.get('ignore_all_space'),
                                  ignorewsamount=opts.get('ignore_space_change'),
                                  ignoreblanklines=opts.get('ignore_blank_lines'))
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        chunks = filterpatch(ui, parsepatch(fp))
        del fp

        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = changes[0] + changes[1] + changes[2]
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(changes[0])

        # 2. backup changed files, so we can restore them in the end
        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified]

        backups = {}
        if tobackup:
            backupdir = repo.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError, err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                shutil.copystat(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.p1(),
                          lambda key: key in backups)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError, err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call
            # a highlevel command with list of pathnames relative to
            # repo root
            cwd = os.getcwd()
            os.chdir(repo.root)
            try:
                commitfunc(ui, repo, *newfiles, **opts)
            finally:
                os.chdir(cwd)

            return 0
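
recordfunc hands the selected files to the commitfunc argument via commitfunc(ui, repo, *newfiles, **opts) after chdir'ing to the repository root. A compatible callable could be as small as the sketch below; in practice the record extension passes something like commands.commit here, but that wiring is not shown in these excerpts:

from mercurial import commands

def commitfunc_sketch(ui, repo, *pats, **opts):
    # Hypothetical commit callback: receives the files that survived
    # interactive filtering, as paths relative to the repository root.
    return commands.commit(ui, repo, *pats, **opts)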
Example #17
    def shelvefunc(ui, repo, message, match, opts):
        changes = repo.status(match=match)[:5]
        modified, added, removed = changes[:3]
        files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        patch_diff = ''.join(patch.diff(repo, repo.dirstate.parents()[0],
                           match=match, changes=changes, opts=diffopts))
        
        fp = cStringIO.StringIO(patch_diff)
        ac = parsepatch(fp)
        fp.close()
        
        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        contenders = {}
        for h in chunks:
            try: contenders.update(dict.fromkeys(h.files()))
            except AttributeError: pass

        newfiles = [f for f in files if f in contenders]

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        modified = dict.fromkeys(changes[0])

        backupdir = repo.join('shelve-backups')

        try:
            bkfiles = [f for f in newfiles if f in modified]
            backups = makebackup(ui, repo, backupdir, bkfiles)
            
            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(sp)
            doshelve = sp.tell()
            sp.seek(0)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                if backups:
                    hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                if doshelve:
                    ui.debug("saving patch to shelve\n")
                    if opts['append']:
                        f = repo.opener(shelfpath, "a")
                    else:
                        f = repo.opener(shelfpath, "w")
                    f.write(sp.getvalue())
                    del f
                del sp
            except:
                try:
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                    ui.debug('removing shelve file\n')
                    os.unlink(repo.join(shelfpath))
                except OSError:
                    pass

            return 0
        finally:
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug('removing backup for %r : %r\n' % (realname, tmpname))
                    os.unlink(tmpname)
                os.rmdir(backupdir)
            except OSError:
                pass
Example #18
def dorecord(ui, repo, committer, *pats, **opts):
    if not ui.interactive:
        raise util.Abort(_('running non-interactively, use commit instead'))

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and accordingly
        prepare working dir into a state, where the job can be delegated to
        non-interactive commit command such as 'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, working dir
        state is restored to original.

        In the end we'll record interesting changes, and everything else will be
        left in place, so the user can continue his work.
        """
        if match.files():
            changes = None
        else:
            changes = repo.status(match=match)[:3]
            modified, added, removed = changes
            match = cmdutil.matchfiles(repo, modified + added + removed)
        diffopts = mdiff.diffopts(git=True, nodates=True)
        chunks = patch.diff(repo,
                            repo.dirstate.parents()[0],
                            match=match,
                            changes=changes,
                            opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have intending-to apply subset of it
        if changes is not None:
            chunks = filterpatch(opts, parsepatch(changes, fp), chunkselector)
        else:
            chgs = repo.status(match=match)[:3]
            chunks = filterpatch(opts, parsepatch(chgs, fp), chunkselector)

        del fp

        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        newfiles = [f for f in match.files() if f in contenders]

        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        if changes is None:
            match = cmdutil.matchfiles(repo, newfiles)
            changes = repo.status(match=match)
        modified = dict.fromkeys(changes[0])

        # 2. backup changed files, so we can restore them in the end
        backups = {}
        backupdir = repo.join('record-backups')
        try:
            os.mkdir(backupdir)
        except OSError, err:
            if err.errno != errno.EEXIST:
                raise
        try:
            # backup continues
            for f in newfiles:
                if f not in modified:
                    continue
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') +
                                               '.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug(_('backup %r as %r\n') % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.parents()[0], backups.has_key)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug(_('applying patch\n'))
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root)
                except patch.PatchError, err:
                    s = str(err)
                    if s:
                        raise util.Abort(s)
                    else:
                        raise util.Abort(_('patch failed to apply'))
            del fp

            # 4. We prepared working directory according to filtered patch.
            #    Now is the time to delegate the job to commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call a
            # highlevel command with list of pathnames relative to repo root
            cwd = os.getcwd()
            os.chdir(repo.root)
            try:
                committer(ui, repo, newfiles, opts)
            finally:
                os.chdir(cwd)

            return 0
Example #19
def unshelve(ui, repo, **opts):
    """restore shelved changes"""

    # Shelf name and path
    shelfname = opts.get("name")
    shelfpath = getshelfpath(repo, shelfname)

    # List all the active shelves by name and return
    if opts["list"]:
        listshelves(ui, repo)
        return

    try:
        patch_diff = repo.opener(shelfpath).read()
        fp = cStringIO.StringIO(patch_diff)
        if opts["inspect"]:
            # wrap ui.write so diff output can be labeled/colorized
            def wrapwrite(orig, *args, **kw):
                label = kw.pop("label", "")
                if label:
                    label += " "
                for chunk, l in patch.difflabel(lambda: args):
                    orig(chunk, label=label + l)

            oldwrite = ui.write
            extensions.wrapfunction(ui, "write", wrapwrite)
            try:
                ui.status(fp.getvalue())
            finally:
                ui.write = oldwrite
        else:
            files = []
            ac = parsepatch(fp)
            for chunk in ac:
                if isinstance(chunk, header):
                    files += chunk.files()
            backupdir = repo.join("shelve-backups")
            backups = makebackup(ui, repo, backupdir, set(files))

            ui.debug("applying shelved patch\n")
            patchdone = 0
            try:
                try:
                    fp.seek(0)
                    patch.internalpatch(ui, repo, fp, 1)
                    patchdone = 1
                except:
                    if opts["force"]:
                        patchdone = 1
                    else:
                        ui.status("restoring backup files\n")
                        for realname, tmpname in backups.iteritems():
                            ui.debug("restoring %r to %r\n" % (tmpname, realname))
                            util.copyfile(tmpname, repo.wjoin(realname))
            finally:
                try:
                    ui.debug("removing backup files\n")
                    shutil.rmtree(backupdir, True)
                except OSError:
                    pass

            if patchdone:
                ui.debug("removing shelved patches\n")
                os.unlink(repo.join(shelfpath))
                ui.status("unshelve completed\n")
    except IOError:
        ui.warn("nothing to unshelve\n")
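
The unshelve code above relies on a makebackup() helper that is defined elsewhere in the extension. Its call sites on this page (makebackup(ui, repo, backupdir, files), returning a {name: backup path} mapping) and the inlined backup loop in the dorecord example further down suggest roughly the following shape; treat this as a sketch, not the extension's actual implementation:

import errno
import os
import shutil
import tempfile

from mercurial import util

def makebackup(ui, repo, backupdir, files):
    # Sketch only: copy every file of interest into backupdir and return a
    # mapping of working-copy name to backup path, mirroring the inlined
    # backup loop in the dorecord example below.
    try:
        os.mkdir(backupdir)
    except OSError, err:
        if err.errno != errno.EEXIST:
            raise
    backups = {}
    for f in files:
        fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') + '.',
                                       dir=backupdir)
        os.close(fd)
        ui.debug('backup %r as %r\n' % (f, tmpname))
        util.copyfile(repo.wjoin(f), tmpname)
        # copystat keeps the mtime, so editors are less likely to notice
        shutil.copystat(repo.wjoin(f), tmpname)
        backups[f] = tmpname
    return backups
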
Ejemplo n.º 20
    def shelvefunc(ui, repo, message, match, opts):
        files = []
        if match.files():
            changes = None
        else:
            changes = repo.status(match=match)[:3]
            modified, added, removed = changes
            files = modified + added + removed
            match = cmdutil.matchfiles(repo, files)
        diffopts = repo.attic.diffopts({'git': True, 'nodates': True})
        chunks = patch.diff(repo, repo.dirstate.parents()[0], match=match,
                            changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have an intending-to-apply subset of it
        ac = record.parsepatch(fp)
        chunks = record.filterpatch(ui, ac)
        # and a not-intending-to-apply subset of it
        rc = refilterpatch(ac, chunks)
        del fp

        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        newfiles = [f for f in files if f in contenders]

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        modified = dict.fromkeys(changes[0])
        backups = {}
        backupdir = repo.join('shelve-backups')

        try:
            bkfiles = [f for f in newfiles if f in modified]
            backups = makebackup(ui, repo, backupdir, bkfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(sp)
            doshelve = sp.tell()
            sp.seek(0)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                if backups:
                    hg.revert(repo, repo.dirstate.parents()[0],
                              lambda key: key in backups)

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug(_('applying patch\n'))
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                if doshelve:
                    ui.debug(_("saving patch to %s\n") % (name))
                    s = repo.attic
                    f = s.opener(name, 'w')
                    f.write(sp.getvalue())
                    del f
                    s.currentpatch = name
                    s.persiststate()
                del sp
            except:
                try:
                    for realname, tmpname in backups.iteritems():
                        ui.debug(_('restoring %r to %r\n') % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                except OSError:
                    pass

            return 0
        finally:
            try:
                for realname, tmpname in backups.iteritems():
                    ui.debug(_('removing backup for %r : %r\n') % (realname, tmpname))
                    os.unlink(tmpname)
                os.rmdir(backupdir)
            except OSError:
                pass
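
filterpatch() returns the hunks the user chose, and refilterpatch(ac, chunks) is used throughout these examples to get the complementary, not-selected hunks that must be re-applied to the working directory after the revert. The helper itself is not shown on this page; a rough sketch of the idea might look like the following, though the real implementation may group chunks differently:

def refilterpatch(allchunks, selected):
    # Sketch only: walk the full parse and keep every hunk that was NOT
    # selected, emitting its file header once so the result is still a
    # well-formed patch stream.  Assumes the same 'header' class used by
    # the isinstance() checks elsewhere on this page is in scope.
    picked = set(selected)
    out = []
    pending = None            # header waiting for a surviving hunk
    for c in allchunks:
        if isinstance(c, header):
            pending = c
        elif c not in picked:
            if pending is not None:
                out.append(pending)
                pending = None
            out.append(c)
    return out
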
Ejemplo n.º 21
def shelve(ui, repo, *pats, **opts):
    '''interactively select changes to set aside

    If a list of files is omitted, all changes reported by :hg:`status`
    will be candidates for shelving.

    You will be prompted for whether to shelve changes to each
    modified file, and for files with multiple changes, for each
    change to use.

    The shelve command works with the Color extension to display
    diffs in color.

    On each prompt, the following responses are possible::

      y - shelve this change
      n - skip this change

      s - skip remaining changes to this file
      f - shelve remaining changes to this file

      d - done, skip remaining changes and files
      a - shelve all changes to all remaining files
      q - quit, shelving no changes

      ? - display help
    '''

    if not ui.interactive():
        raise util.Abort(_('shelve can only be run interactively'))

    # List all the active shelves by name and return
    if opts['list']:
        listshelves(ui, repo)
        return

    forced = opts['force'] or opts['append']

    # Shelf name and path
    shelfname = opts.get('name')
    shelfpath = getshelfpath(repo, shelfname)

    if os.path.exists(repo.join(shelfpath)) and not forced:
        raise util.Abort(_('shelve data already exists'))

    def shelvefunc(ui, repo, message, match, opts):
        parents = repo.dirstate.parents()
        changes = repo.status(match=match)[:3]
        modified, added, removed = changes
        diffopts = patch.diffopts(ui, opts={'git': True, 'nodates': True})
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO(''.join(chunks))

        try:
            ac = parsepatch(fp)
        except patch.PatchError, err:
            raise util.Abort(_('error parsing patch: %s') % err)

        del fp

        # 1. filter patch, so we have an intending-to-apply subset of it
        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        # set of files to be processed
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        # exclude sources of copies that are otherwise untouched
        changed = modified + added + removed
        newfiles = set(f for f in changed if f in contenders)
        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        # 2. backup changed files, so we can restore them in case of error
        backupdir = repo.join('shelve-backups')
        try:
            backups = makebackup(ui, repo, backupdir, newfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                c.write(sp)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                # skip files not selected for shelving
                if c.filename() in newfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo  (clean)
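                # our own backups were taken above, so keep revert from writing .orig files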
                opts['no_backup'] = True
                cmdutil.revert(ui, repo, repo['.'], parents,
                               *[os.path.join(repo.root, f) for f in newfiles],
                               **opts)
                for f in added:
                    if f in newfiles:
                        util.unlinkpath(repo.wjoin(f))

                # 3b. (apply)
                if dopatch:
                    try:
                        ui.debug('applying patch\n')
                        ui.debug(fp.getvalue())
                        patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                    except patch.PatchError, err:
                        raise util.Abort(str(err))
                del fp

                # 4. We prepared working directory according to filtered
                #    patch. Now is the time to save the shelved changes!
                ui.debug("saving patch to shelve\n")
                if opts['append']:
                    sp.write(repo.opener(shelfpath).read())
                sp.seek(0)
                f = repo.opener(shelfpath, "w")
                f.write(sp.getvalue())
                del f, sp
            except:
                ui.warn("shelving failed: %s\n" % sys.exc_info()[1])
                try:
                    # re-schedule remove
                    matchremoved = scmutil.matchfiles(repo, removed)
                    cmdutil.forget(ui, repo, matchremoved, "", True)
                    for f in removed:
                        if f in newfiles and os.path.isfile(repo.wjoin(f)):
                            os.unlink(repo.wjoin(f))
                    # copy back backups
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                        # Our calls to copystat() here and above are a
                        # hack to trick any editors that have f open into
                        # thinking we haven't modified it.
                        #
                        # Also note that this is racy: an editor could
                        # notice the file's mtime before we've finished
                        # writing it.
                        shutil.copystat(tmpname, repo.wjoin(realname))
                    # re-schedule add
                    matchadded = scmutil.matchfiles(repo, added)
                    cmdutil.add(ui, repo, matchadded, False, False, "", True)

                    ui.debug('removing shelve file\n')
                    if os.path.isfile(repo.join(shelfpath)):
                        os.unlink(repo.join(shelfpath))
                except OSError, err:
                    ui.warn("restoring backup failed: %s\n" % err)
Ejemplo n.º 22
def dorecord(ui, repo, commitfunc, *pats, **opts):
    if not ui.interactive():
        raise util.Abort(_('running non-interactively, use commit instead'))

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(
                _('cannot partially commit a merge '
                  '(use "hg commit" instead)'))

        changes = repo.status(match=match)[:3]
        diffopts = mdiff.diffopts(git=True, nodates=True)
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter patch, so we have an intending-to-apply subset of it
        chunks = filterpatch(ui, parsepatch(fp))
        del fp

        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = changes[0] + changes[1] + changes[2]
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(changes[0])

        # 2. backup changed files, so we can restore them in the end
        backups = {}
        backupdir = repo.join('record-backups')
        try:
            os.mkdir(backupdir)
        except OSError, err:
            if err.errno != errno.EEXIST:
                raise
        try:
            # backup continues
            for f in newfiles:
                if f not in modified:
                    continue
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') + '.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                shutil.copystat(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
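            # write only the selected hunks of the backed-up files into a fresh patch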
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo,
                          repo.dirstate.parents()[0],
                          lambda key: key in backups)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    pfiles = {}
                    patch.internalpatch(fp,
                                        ui,
                                        1,
                                        repo.root,
                                        files=pfiles,
                                        eolmode=None)
                    cmdutil.updatedir(ui, repo, pfiles)
                except patch.PatchError, err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call
            # a highlevel command with list of pathnames relative to
            # repo root
            cwd = os.getcwd()
            os.chdir(repo.root)
            try:
                commitfunc(ui, repo, *newfiles, **opts)
            finally:
                os.chdir(cwd)

            return 0
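
dorecord() takes the actual commit as a callback, so the same driver can delegate to commit, qrefresh "or the like", as the step-4 comment puts it. A minimal way to wire it to the plain commit command looks roughly like this (a sketch of the usual wrapper, not necessarily this extension's exact code):

from mercurial import commands

def record(ui, repo, *pats, **opts):
    '''interactively select changes to commit'''
    # Once dorecord() has narrowed the working directory to the selected
    # hunks, commands.commit() is invoked on the chosen files.
    return dorecord(ui, repo, commands.commit, *pats, **opts)
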
Ejemplo n.º 23
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo, repo.dirstate.p1(), lambda key: key in backups)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug("applying patch\n")
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError, err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # it is important to first chdir to repo root -- we'll call
            # a highlevel command with list of pathnames relative to
            # repo root
            cwd = os.getcwd()
            os.chdir(repo.root)
            try:
                commitfunc(ui, repo, *newfiles, **opts)
Ejemplo n.º 24
    def shelvefunc(ui, repo, message, match, opts):
        # If an MQ patch is applied, consider all qdiff changes
        if hasattr(repo, 'mq') and repo.mq.applied and repo['.'] == repo['qtip']:
            qtip = repo['.']
            basenode = qtip.parents()[0].node()
        else:
            basenode = repo.dirstate.parents()[0]

        changes = repo.status(node1=basenode, match=match)[:5]
        modified, added, removed = changes[:3]
        files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        patch_diff = ''.join(patch.diff(repo, basenode, match=match,
                             changes=changes, opts=diffopts))

        fp = cStringIO.StringIO(patch_diff)
        ac = parsepatch(fp)
        fp.close()
        chunks = filterpatch(ui, ac)
        rc = refilterpatch(ac, chunks)

        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        newfiles = [f for f in files if f in contenders]

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        modified = dict.fromkeys(changes[0])

        backupdir = repo.join('shelve-backups')

        try:
            bkfiles = [f for f in newfiles if f in modified]
            backups = makebackup(ui, repo, backupdir, bkfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(sp)
            doshelve = sp.tell()
            sp.seek(0)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                if backups:
                    hg.revert(repo, basenode, lambda key: key in backups)

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug(_('applying patch\n'))
                    ui.debug(fp.getvalue())
                    patch.internalpatch(fp, ui, 1, repo.root, eolmode=None)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                if doshelve:
                    ui.debug(_('saving patch to shelve\n'))
                    if opts['append']:
                        f = repo.opener('shelve', "a")
                    else:
                        f = repo.opener('shelve', "w")
                    f.write(sp.getvalue())
                    del f
                del sp
            except:
                try:
                    for realname, tmpname in backups.iteritems():
                        ui.debug(_('restoring %r to %r\n') % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                    ui.debug(_('removing shelve file\n'))
                    os.unlink(repo.join('shelve'))
                except (IOError, OSError), e:
                    ui.warn(_('abort: backup restore failed, %s\n') % str(e))

            return 0
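
Finally, several of the commands above honour a --list flag by calling listshelves(ui, repo), which is also defined elsewhere. Assuming the shelf layout sketched for getshelfpath() earlier, a hypothetical version would simply enumerate the shelf files under .hg:

import os

def listshelves(ui, repo):
    # Hypothetical sketch, tied to the layout assumed for getshelfpath():
    # print the name of every shelf file stored under .hg/shelves.
    shelvedir = repo.join('shelves')
    if not os.path.isdir(shelvedir):
        ui.status('no shelves found\n')
        return
    for name in sorted(os.listdir(shelvedir)):
        ui.write('%s\n' % name)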