Example #1
def snapshot(ui, repo, files, node, tmproot, listsubrepos):
    """snapshot files as of some revision

    if not using snapshot, -I/-X does not work and recursive diff
    in tools like kdiff3 and meld displays too many files.
    """
    dirname = os.path.basename(repo.root)
    if dirname == '':
        dirname = 'root'
    if node is not None:
        dirname = '%s.%s' % (dirname, short(node))
    base = os.path.join(tmproot, dirname)
    os.mkdir(base)

    if node is not None:
        ui.note(_('making snapshot of %d files from rev %s\n') %
                (len(files), short(node)))
    else:
        ui.note(_('making snapshot of %d files from working directory\n') %
                (len(files)))

    if files:
        repo.ui.setconfig('ui', 'archivemeta', False)

        archival.archive(
            repo,
            base,
            node,
            'files',
            match=scmutil.matchfiles(repo, files),
            subrepos=listsubrepos)

    return dirname
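
A minimal usage sketch for the snapshot() helper above, assuming an existing ui and repo and a throwaway temporary directory; the file name is illustrative:

import tempfile

# snapshot one tracked file at the working directory's parent revision
tmproot = tempfile.mkdtemp(prefix='extdiff.')
node = repo['.'].node()
dirname = snapshot(ui, repo, ['README'], node, tmproot, False)
# the snapshotted copy now lives under os.path.join(tmproot, dirname)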
Example #2
def get_lines_and_files(ui, repo, ctx1, ctx2, fns):
    # Returns a list of dicts:
    # [{'filename': <file>, 'added': nn, 'removed': nn},
    #  {....},
    # ]
    files = []
    currentfile = {}
    fmatch = scmutil.matchfiles(repo, fns)
    diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
    for l in diff.split('\n'):
        if l.startswith("diff -r"):
            # If we have anything in currentfile, append to list
            if currentfile:
                files.append(currentfile)
                currentfile = {}

            # This is the first line of a file; set it as the current file
            currentfile['filename'] = l.split(' ')[-1]
            currentfile['added'] = 0
            currentfile['removed'] = 0

        if l.startswith("+") and not l.startswith("+++ "):
            currentfile['added'] += 1
        elif l.startswith("-") and not l.startswith("--- "):
            currentfile['removed'] += 1
    # The last file won't have been added to files, so add it now
    # (unless the diff was empty and there is no current file at all)
    if currentfile:
        files.append(currentfile)
    return files
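
A hedged usage sketch for get_lines_and_files() above, in the same Python 2-era style, assuming ui and repo objects; the two contexts compared are the working directory's parent and its first parent, restricted to the files that child revision touched:

ctx2 = repo['.']
ctx1 = ctx2.p1()
stats = get_lines_and_files(ui, repo, ctx1, ctx2, ctx2.files())
for entry in stats:
    ui.write('%s: +%d -%d\n'
             % (entry['filename'], entry['added'], entry['removed']))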
Example #4
def changedlines(ui, repo, ctx1, ctx2, fns):
    added, removed = 0, 0
    fmatch = scmutil.matchfiles(repo, fns)
    diff = "".join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
    for l in diff.split("\n"):
        if l.startswith("+") and not l.startswith("+++ "):
            added += 1
        elif l.startswith("-") and not l.startswith("--- "):
            removed += 1
    return (added, removed)
Example #5
def changedlines(ui, repo, ctx1, ctx2, fns):
    added, removed = 0, 0
    fmatch = scmutil.matchfiles(repo, fns)
    diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
    for l in diff.split('\n'):
        if l.startswith("+") and not l.startswith("+++ "):
            added += 1
        elif l.startswith("-") and not l.startswith("--- "):
            removed += 1
    return (added, removed)
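
For reference, a small sketch of the matcher all of these helpers build, assuming only a repo object; scmutil.matchfiles() returns an exact matcher, so the resulting diff is restricted to precisely the listed paths:

m = scmutil.matchfiles(repo, ['setup.py', 'README'])
m('setup.py')    # True: listed explicitly
m('docs/x.txt')  # False: anything not in the list is excluded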
Example #6
def _shelvecreatedcommit(repo, node, name):
    info = {'node': nodemod.hex(node)}
    shelvedfile(repo, name, 'shelve').writeinfo(info)
    bases = list(mutableancestors(repo[node]))
    shelvedfile(repo, name, 'hg').writebundle(bases, node)
    # Create a matcher so that prefetch doesn't attempt to fetch the entire
    # repository pointlessly.
    match = scmutil.matchfiles(repo, repo[node].files())
    with shelvedfile(repo, name, patchextension).opener('wb') as fp:
        cmdutil.exportfile(repo, [node],
                           fp,
                           opts=mdiff.diffopts(git=True),
                           match=match)
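
A hedged sketch of how _shelvecreatedcommit() might be driven, assuming the working-copy changes have just been committed and using an illustrative shelf name; shelvedfile and patchextension come from the surrounding shelve module:

node = repo.commit(text='shelved changes', user='shelve@localhost')
if node is not None:
    _shelvecreatedcommit(repo, node, 'default')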
Example #7
def snapshot(ui, repo, files, node, tmproot, listsubrepos):
    """snapshot files as of some revision
    if not using snapshot, -I/-X does not work and recursive diff
    in tools like kdiff3 and meld displays too many files."""
    dirname = os.path.basename(repo.root)
    if dirname == b"":
        dirname = b"root"
    if node is not None:
        dirname = b'%s.%s' % (dirname, short(node))
    base = os.path.join(tmproot, dirname)
    os.mkdir(base)
    fnsandstat = []

    if node is not None:
        ui.note(
            _(b'making snapshot of %d files from rev %s\n')
            % (len(files), short(node))
        )
    else:
        ui.note(
            _(b'making snapshot of %d files from working directory\n')
            % (len(files))
        )

    if files:
        repo.ui.setconfig(b"ui", b"archivemeta", False)

        archival.archive(
            repo,
            base,
            node,
            b'files',
            match=scmutil.matchfiles(repo, files),
            subrepos=listsubrepos,
        )

        for fn in sorted(files):
            wfn = util.pconvert(fn)
            ui.note(b'  %s\n' % wfn)

            if node is None:
                dest = os.path.join(base, wfn)

                fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
    return dirname, fnsandstat
Example #8
def _getrevs(bundle, onto):
    'extracts and validates the revs to be imported'
    validaterevset(bundle, 'bundle()')
    revs = [bundle[r] for r in bundle.revs('sort(bundle())')]
    onto = bundle[onto.hex()]
    # Fast forward update, no rebase needed
    if list(bundle.set('bundle() & %d::', onto.rev())):
        return revs, onto

    if revs:
        # We want to rebase the highest bundle root that is an ancestor of
        # `onto`.
        oldonto = list(bundle.set('max(parents(bundle()) - bundle() & ::%d)',
                                  onto.rev()))
        if not oldonto:
            # If there's no shared history, only allow the rebase if the
            # incoming changes are completely distinct.
            sharedparents = list(bundle.set('parents(bundle()) - bundle()'))
            if not sharedparents:
                return revs, bundle[nullid]
            raise error.Abort(_('pushed commits do not branch from an ancestor '
                               'of the desired destination %s') % onto.hex())
        oldonto = oldonto[0]

        # Computes a list of all files that are in the changegroup, and diffs it
        # against all the files that changed between the old onto (ex: our old
        # bookmark location) and the new onto (ex: the server's actual bookmark
        # location). Since oldonto->onto is the distance of the rebase, this
        # should catch any conflicting changes.
        files = set()
        for rev in revs:
            files |= set(rev.files())
        filematcher = scmutil.matchfiles(bundle, files)
        commonmanifest = oldonto.manifest().matches(filematcher)
        ontomanifest = onto.manifest().matches(filematcher)
        conflicts = ontomanifest.diff(commonmanifest).keys()
        if conflicts:
            raise error.Abort(_('conflicting changes in:\n%s') %
                             ''.join('    %s\n' % f for f in sorted(conflicts)))

    return revs, oldonto
Example #9
def _prefetchfiles(repo, workqueue, basepaths):
    toprefetch = set()

    # Prefetch the files that will be fixed.
    for rev, path in workqueue:
        if rev == wdirrev:
            continue
        toprefetch.add((rev, path))

    # Prefetch the base contents for lineranges().
    for (baserev, fixrev, path), basepath in basepaths.items():
        toprefetch.add((baserev, basepath))

    if toprefetch:
        scmutil.prefetchfiles(
            repo,
            [
                (rev, scmutil.matchfiles(repo, [path]))
                for rev, path in toprefetch
            ],
        )
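
A hedged sketch of driving _prefetchfiles() above, with hypothetical workqueue and basepaths values whose shapes are inferred from the two loops (a list of (rev, path) pairs and a {(baserev, fixrev, path): basepath} mapping); wdirrev is assumed to be imported from mercurial.node in the original module:

from mercurial.node import wdirrev

workqueue = [(5, b'src/a.py'), (wdirrev, b'src/b.py')]  # working-dir entries are skipped
basepaths = {(4, 5, b'src/a.py'): b'src/a.py'}
_prefetchfiles(repo, workqueue, basepaths)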
Example #10
def snapshot(ui, repo, files, node, tmproot, listsubrepos):
    '''snapshot files as of some revision
    if not using snapshot, -I/-X does not work and recursive diff
    in tools like kdiff3 and meld displays too many files.'''
    dirname = os.path.basename(repo.root)
    if dirname == "":
        dirname = "root"
    if node is not None:
        dirname = '%s.%s' % (dirname, short(node))
    base = os.path.join(tmproot, dirname)
    os.mkdir(base)
    fns_and_mtime = []

    if node is not None:
        ui.note(_('making snapshot of %d files from rev %s\n') %
                (len(files), short(node)))
    else:
        ui.note(_('making snapshot of %d files from working directory\n') %
            (len(files)))

    if files:
        repo.ui.setconfig("ui", "archivemeta", False)

        archival.archive(repo, base, node, 'files',
                         matchfn=scmutil.matchfiles(repo, files),
                         subrepos=listsubrepos)

        for fn in sorted(files):
            wfn = util.pconvert(fn)
            ui.note('  %s\n' % wfn)

            if node is None:
                dest = os.path.join(base, wfn)

                fns_and_mtime.append((dest, repo.wjoin(fn),
                                      os.lstat(dest).st_mtime))
    return dirname, fns_and_mtime
Example #11
def _getrevs(bundle, onto):
    'extracts and validates the revs to be imported'
    validaterevset(bundle, 'bundle()')
    revs = [bundle[r] for r in bundle.revs('sort(bundle())')]
    onto = bundle[onto.hex()]

    if revs:
        tail = revs[0]

        if onto.ancestor(tail).hex() != tail.p1().hex():
            raise util.Abort(_('missing changesets between %r and %r') %
                             (onto.ancestor(tail).hex(),
                              tail.p1().hex()))

        # Is there a more efficient way to do this check?
        files = reduce(operator.or_, [set(rev.files()) for rev in revs], set())
        filematcher = scmutil.matchfiles(tail.repo(), files)
        commonmanifest = tail.p1().manifest().matches(filematcher)
        ontomanifest = onto.manifest().matches(filematcher)
        conflicts = ontomanifest.diff(commonmanifest).keys()
        if conflicts:
            raise util.Abort(_('conflicting changes in %r') % conflicts)

    return revs
Example #12
 def findconflicts():
     # Returns all the files touched in the bundle that are also touched
     # between the old onto (ex: our old bookmark location) and the new
     # onto (ex: the server's actual bookmark location).
     filematcher = scmutil.matchfiles(bundle, bundlefiles)
     return onto.manifest().diff(oldonto.manifest(), filematcher).keys()
Example #13
#   M bar-m
#   A bar-a
#   R bar-r
#   C foo

from mercurial import scmutil

print('== checking workingctx.status:')

wctx = repo[None]
print('wctx._status=%s' % (str(wctx._status)))

print('=== with "pattern match":')
print(
    actx1.status(other=wctx,
                 match=scmutil.matchfiles(repo, [b'bar-m', b'foo'])))
print('wctx._status=%s' % (str(wctx._status)))
print(
    actx2.status(other=wctx,
                 match=scmutil.matchfiles(repo, [b'bar-m', b'foo'])))
print('wctx._status=%s' % (str(wctx._status)))

print('=== with "always match" and "listclean=True":')
print(actx1.status(other=wctx, listclean=True))
print('wctx._status=%s' % (str(wctx._status)))
print(actx2.status(other=wctx, listclean=True))
print('wctx._status=%s' % (str(wctx._status)))

print("== checking workingcommitctx.status:")

wcctx = context.workingcommitctx(repo,
Example #14
def matchfilesutil(repo, files):
    # matchfiles moved from cmdutil to scmutil in hg 1.9
    if hasattr(cmdutil, 'matchfiles'):
        return cmdutil.matchfiles(repo, files)
    else:
        return scmutil.matchfiles(repo, files)
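
A short usage sketch for the compatibility wrapper above, assuming a repo with a couple of tracked files; the returned matcher behaves the same on either side of the hg 1.9 move:

m = matchfilesutil(repo, ['setup.py', 'README'])
# in the hg 1.x era targeted by this wrapper, status() returns a plain tuple
modified, added, removed = repo.status(match=m)[:3]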
Example #15
def shelve(ui, repo, *pats, **opts):
    '''interactively select changes to set aside

    If a list of files is omitted, all changes reported by :hg:`status`
    will be candidates for shelving.

    You will be prompted for whether to shelve changes to each
    modified file, and for files with multiple changes, for each
    change to use.

    The shelve command works with the Color extension to display
    diffs in color.

    On each prompt, the following responses are possible::

      y - shelve this change
      n - skip this change

      s - skip remaining changes to this file
      f - shelve remaining changes to this file

      d - done, skip remaining changes and files
      a - shelve all changes to all remaining files
      q - quit, shelving no changes

      ? - display help
    '''

    if not ui.interactive():
        raise util.Abort(_('shelve can only be run interactively'))

    # List all the active shelves by name and return
    if opts['list']:
        listshelves(ui, repo)
        return

    forced = opts['force'] or opts['append']

    # Shelf name and path
    shelfname = opts.get('name')
    shelfpath = getshelfpath(repo, shelfname)

    if os.path.exists(repo.join(shelfpath)) and not forced:
        raise util.Abort(_('shelve data already exists'))

    def shelvefunc(ui, repo, message, match, opts):
        parents = repo.dirstate.parents()
        changes = repo.status(match=match)[:3]
        modified, added, removed = changes
        diffopts = patch.diffopts(ui, opts={'git': True, 'nodates': True})
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO(''.join(chunks))

        try:
            ac = parsepatch(fp)
        except patch.PatchError, err:
            raise util.Abort(_('error parsing patch: %s') % err)

        del fp

        # 1. filter patch, so we have the intending-to-apply subset of it
        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        # set of files to be processed
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        # exclude sources of copies that are otherwise untouched
        changed = modified + added + removed
        newfiles = set(f for f in changed if f in contenders)
        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        # 2. backup changed files, so we can restore them in case of error
        backupdir = repo.join('shelve-backups')
        try:
            backups = makebackup(ui, repo, backupdir, newfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                c.write(sp)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                # skip files not selected for shelving
                if c.filename() in newfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo  (clean)
                opts['no_backup'] = True
                cmdutil.revert(ui, repo, repo['.'], parents,
                               *[os.path.join(repo.root, f) for f in newfiles],
                               **opts)
                for f in added:
                    if f in newfiles:
                        util.unlinkpath(repo.wjoin(f))

                # 3b. (apply)
                if dopatch:
                    try:
                        ui.debug('applying patch\n')
                        ui.debug(fp.getvalue())
                        patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                    except patch.PatchError, err:
                        raise util.Abort(str(err))
                del fp

                # 4. We prepared working directory according to filtered
                #    patch. Now is the time to save the shelved changes!
                ui.debug("saving patch to shelve\n")
                if opts['append']:
                    sp.write(repo.opener(shelfpath).read())
                sp.seek(0)
                f = repo.opener(shelfpath, "w")
                f.write(sp.getvalue())
                del f, sp
            except:
                ui.warn("shelving failed: %s\n" % sys.exc_info()[1])
                try:
                    # re-schedule remove
                    matchremoved = scmutil.matchfiles(repo, removed)
                    cmdutil.forget(ui, repo, matchremoved, "", True)
                    for f in removed:
                        if f in newfiles and os.path.isfile(repo.wjoin(f)):
                            os.unlink(repo.wjoin(f))
                    # copy back backups
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                        # Our calls to copystat() here and above are a
                        # hack to trick any editors that have f open into
                        # thinking we haven't modified it.
                        #
                        # Also note that this is racy, as an editor could
                        # notice the file's mtime before we've finished
                        # writing it.
                        shutil.copystat(tmpname, repo.wjoin(realname))
                    # re-schedule add
                    matchadded = scmutil.matchfiles(repo, added)
                    cmdutil.add(ui, repo, matchadded, False, False, "", True)

                    ui.debug('removing shelve file\n')
                    if os.path.isfile(repo.join(shelfpath)):
                        os.unlink(repo.join(shelfpath))
                except OSError, err:
                    ui.warn("restoring backup failed: %s\n" % err)
Example #16
 def new(self, repo, patchfn, *pats, **opts):
     """options:
        msg: a string or a no-argument function returning a string
     """
     msg = opts.get('msg')
     user = opts.get('user')
     date = opts.get('date')
     if date:
         date = util.parsedate(date)
     diffopts = self.diffopts({'git': opts.get('git')})
     if opts.get('checkname', True):
         self.checkpatchname(patchfn)
     inclsubs = self.checksubstate(repo)
     if inclsubs:
         inclsubs.append('.hgsubstate')
         substatestate = repo.dirstate['.hgsubstate']
     if opts.get('include') or opts.get('exclude') or pats:
         if inclsubs:
             pats = list(pats or []) + inclsubs
         match = scmutil.match(repo[None], pats, opts)
         # detect missing files in pats
         def badfn(f, msg):
             if f != '.hgsubstate': # .hgsubstate is auto-created
                 raise util.Abort('%s: %s' % (f, msg))
         match.bad = badfn
         changes = repo.status(match=match)
         m, a, r, d = changes[:4]
     else:
         changes = self.checklocalchanges(repo, force=True)
         m, a, r, d = changes
     match = scmutil.matchfiles(repo, m + a + r + inclsubs)
     if len(repo[None].parents()) > 1:
         raise util.Abort(_('cannot manage merge changesets'))
     commitfiles = m + a + r
     self.checktoppatch(repo)
     insert = self.fullseriesend()
     wlock = repo.wlock()
     try:
         try:
             # if patch file write fails, abort early
             p = self.opener(patchfn, "w")
         except IOError, e:
             raise util.Abort(_('cannot write patch "%s": %s')
                              % (patchfn, e.strerror))
         try:
             if self.plainmode:
                 if user:
                     p.write("From: " + user + "\n")
                     if not date:
                         p.write("\n")
                 if date:
                     p.write("Date: %d %d\n\n" % date)
             else:
                 p.write("# HG changeset patch\n")
                 p.write("# Parent "
                         + hex(repo[None].p1().node()) + "\n")
                 if user:
                     p.write("# User " + user + "\n")
                 if date:
                     p.write("# Date %s %s\n\n" % date)
             if util.safehasattr(msg, '__call__'):
                 msg = msg()
             commitmsg = msg and msg or ("[mq]: %s" % patchfn)
             n = newcommit(repo, None, commitmsg, user, date, match=match,
                           force=True)
             if n is None:
                 raise util.Abort(_("repo commit failed"))
             try:
                 self.fullseries[insert:insert] = [patchfn]
                 self.applied.append(statusentry(n, patchfn))
                 self.parseseries()
                 self.seriesdirty = True
                 self.applieddirty = True
                 if msg:
                     msg = msg + "\n\n"
                     p.write(msg)
                 if commitfiles:
                     parent = self.qparents(repo, n)
                     if inclsubs:
                         self.putsubstate2changes(substatestate, changes)
                     chunks = patchmod.diff(repo, node1=parent, node2=n,
                                            changes=changes, opts=diffopts)
                     for chunk in chunks:
                         p.write(chunk)
                 p.close()
                 r = self.qrepo()
                 if r:
                     r[None].add([patchfn])
             except: # re-raises
                 repo.rollback()
                 raise
         except Exception:
             patchpath = self.join(patchfn)
             try:
                 os.unlink(patchpath)
             except OSError:
                 self.ui.warn(_('error unlinking %s\n') % patchpath)
             raise
         self.removeundo(repo)
Example #17
def shelve(ui, repo, *pats, **opts):
    '''interactively select changes to set aside

    If a list of files is omitted, all changes reported by :hg:`status`
    will be candidates for shelving.

    You will be prompted for whether to shelve changes to each
    modified file, and for files with multiple changes, for each
    change to use.

    The shelve command works with the Color extension to display
    diffs in color.

    On each prompt, the following responses are possible::

      y - shelve this change
      n - skip this change

      s - skip remaining changes to this file
      f - shelve remaining changes to this file

      d - done, skip remaining changes and files
      a - shelve all changes to all remaining files
      q - quit, shelving no changes

      ? - display help
    '''

    if not ui.interactive() and not (opts['all'] or opts['list']):
        raise util.Abort(_('shelve can only be run interactively'))

    # List all the active shelves by name and return
    if opts['list']:
        listshelves(ui, repo)
        return

    forced = opts['force'] or opts['append']

    # Shelf name and path
    shelfname = opts.get('name')
    shelfpath = getshelfpath(repo, shelfname)

    if os.path.exists(repo.join(shelfpath)) and not forced:
        raise util.Abort(_('shelve data already exists'))

    def shelvefunc(ui, repo, message, match, opts):
        parents = repo.dirstate.parents()
        changes = repo.status(match=match)[:3]
        modified, added, removed = changes
        diffopts = patch.diffopts(ui, opts={'git': True, 'nodates': True})
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO(''.join(chunks))

        try:
            ac = parsepatch(fp)
        except patch.PatchError, err:
            raise util.Abort(_('error parsing patch: %s') % err)

        del fp

        # 1. filter patch, so we have the intending-to-apply subset of it
        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        # set of files to be processed
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        # exclude sources of copies that are otherwise untouched
        changed = modified + added + removed
        newfiles = set(f for f in changed if f in contenders)
        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        # 2. backup changed files, so we can restore them in case of error
        backupdir = repo.join('shelve-backups')
        try:
            backups = makebackup(ui, repo, backupdir, newfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                c.write(sp)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                # skip files not selected for shelving
                if c.filename() in newfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo  (clean)
                opts['no_backup'] = True
                cmdutil.revert(ui, repo, repo['.'], parents,
                               *[repo.wjoin(f) for f in newfiles], **opts)
                for f in added:
                    if f in newfiles:
                        util.unlinkpath(repo.wjoin(f))

                # 3b. (apply)
                if dopatch:
                    try:
                        ui.debug('applying patch\n')
                        ui.debug(fp.getvalue())
                        patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                    except patch.PatchError, err:
                        raise util.Abort(str(err))
                del fp

                # 4. We prepared working directory according to filtered
                #    patch. Now is the time to save the shelved changes!
                ui.debug("saving patch to shelve\n")
                if opts['append']:
                    sp.write(repo.opener(shelfpath).read())
                sp.seek(0)
                f = repo.opener(shelfpath, "w")
                f.write(sp.getvalue())
                del f, sp
            except:
                ui.warn("shelving failed: %s\n" % sys.exc_info()[1])
                try:
                    # re-schedule remove
                    matchremoved = scmutil.matchfiles(repo, removed)
                    cmdutil.forget(ui, repo, matchremoved, "", True)
                    for f in removed:
                        if f in newfiles and os.path.isfile(repo.wjoin(f)):
                            os.unlink(repo.wjoin(f))
                    # copy back backups
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                        # Our calls to copystat() here and above are a
                        # hack to trick any editors that have f open into
                        # thinking we haven't modified it.
                        #
                        # Also note that this is racy, as an editor could
                        # notice the file's mtime before we've finished
                        # writing it.
                        shutil.copystat(tmpname, repo.wjoin(realname))
                    # re-schedule add
                    matchadded = scmutil.matchfiles(repo, added)
                    cmdutil.add(ui, repo, matchadded, False, False, "", True)

                    ui.debug('removing shelve file\n')
                    if os.path.isfile(repo.join(shelfpath)):
                        os.unlink(repo.join(shelfpath))
                except OSError, err:
                    ui.warn("restoring backup failed: %s\n" % err)
Example #18
    def shelvefunc(ui, repo, message, match, opts):
        parents = repo.dirstate.parents()
        changes = repo.status(match=match)[:5]
        modified, added, removed = changes[:3]
        files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        patch_diff = "".join(patch.diff(repo, parents[0], match=match, changes=changes, opts=diffopts))

        fp = cStringIO.StringIO(patch_diff)
        ac = parsepatch(fp)
        fp.close()

        chunks = filterpatch(ui, ac, not opts["all"])
        rc = refilterpatch(ac, chunks)

        # set of files to be processed
        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        # exclude sources of copies that are otherwise untouched
        newfiles = set(f for f in files if f in contenders)

        if not newfiles:
            ui.status(_("no changes to shelve\n"))
            return 0

        backupdir = repo.join("shelve-backups")

        try:
            backups = makebackup(ui, repo, backupdir, newfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                c.write(sp)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                # skip files not selected for shelving
                if c.filename() in newfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                opts["no_backup"] = True
                cmdutil.revert(ui, repo, repo["."], parents, *[os.path.join(repo.root, f) for f in newfiles], **opts)
                for f in added:
                    if f in newfiles:
                        util.unlinkpath(repo.wjoin(f))

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug("applying patch\n")
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                ui.debug("saving patch to shelve\n")
                if opts["append"]:
                    sp.write(repo.opener(shelfpath).read())
                sp.seek(0)
                f = repo.opener(shelfpath, "w")
                f.write(sp.getvalue())
                del f, sp
            except:
                ui.warn("shelving failed: %s\n" % sys.exc_info()[1])
                try:
                    # re-schedule remove
                    matchremoved = scmutil.matchfiles(repo, removed)
                    cmdutil.forget(ui, repo, matchremoved, "", True)
                    for f in removed:
                        if f in newfiles and os.path.isfile(repo.wjoin(f)):
                            os.unlink(repo.wjoin(f))
                    # copy back backups
                    for realname, tmpname in backups.iteritems():
                        ui.debug("restoring %r to %r\n" % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                    # re-schedule add
                    matchadded = scmutil.matchfiles(repo, added)
                    cmdutil.add(ui, repo, matchadded, False, False, "", True)

                    ui.debug("removing shelve file\n")
                    if os.path.isfile(repo.join(shelfpath)):
                        os.unlink(repo.join(shelfpath))
                except OSError, err:
                    ui.warn("restoring backup failed: %s\n" % err)

            return 0
Example #19
    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None,
               tobackup=None, keepchanges=False):
        """returns (error, hash)

        error = 1 for unable to read, 2 for patch failed, 3 for patch
        fuzz. tobackup is None or a set of files to backup before they
        are modified by a patch.
        """
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explainpushable(patchname, all_patches=True)
                continue
            self.ui.status(_("applying %s\n") % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                ph = patchheader(self.join(patchname), self.plainmode)
            except IOError:
                self.ui.warn(_("unable to read %s\n") % patchname)
                err = 1
                break

            message = ph.message
            if not message:
                # The commit message should not be translated
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    # The commit message should not be translated
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            if ph.haspatch:
                if tobackup:
                    touched = patchmod.changedfiles(self.ui, repo, pf)
                    touched = set(touched) & tobackup
                    if touched and keepchanges:
                        raise AbortNoCleanup(
                            _("local changes found, refresh first"))
                    self.backup(repo, touched, copy=True)
                    tobackup = tobackup - touched
                (patcherr, files, fuzz) = self.patch(repo, pf)
                if all_files is not None:
                    all_files.update(files)
                patcherr = not patcherr
            else:
                self.ui.warn(_("patch %s is empty\n") % patchname)
                patcherr, files, fuzz = 0, [], 0

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.lexists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                for f in removed:
                    repo.dirstate.remove(f)
                for f in merged:
                    repo.dirstate.merge(f)
                p1, p2 = repo.dirstate.parents()
                repo.setparents(p1, merge)

            match = scmutil.matchfiles(repo, files or [])
            oldtip = repo['tip']
            n = newcommit(repo, None, message, ph.user, ph.date, match=match,
                          force=True)
            if repo['tip'] == oldtip:
                raise util.Abort(_("qpush exactly duplicates child changeset"))
            if n is None:
                raise util.Abort(_("repository commit failed"))

            if update_status:
                self.applied.append(statusentry(n, patchname))

            if patcherr:
                self.ui.warn(_("patch failed, rejects left in working dir\n"))
                err = 2
                break

            if fuzz and strict:
                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
                err = 3
                break
        return (err, n)
Example #20
# status at this point:
#   M bar-m
#   A bar-a
#   R bar-r
#   C foo

from mercurial import scmutil

print '== checking workingctx.status:'

wctx = repo[None]
print 'wctx._status=%s' % (str(wctx._status))

print '=== with "pattern match":'
print actx1.status(other=wctx,
                   match=scmutil.matchfiles(repo, ['bar-m', 'foo']))
print 'wctx._status=%s' % (str(wctx._status))
print actx2.status(other=wctx,
                   match=scmutil.matchfiles(repo, ['bar-m', 'foo']))
print 'wctx._status=%s' % (str(wctx._status))

print '=== with "always match" and "listclean=True":'
print actx1.status(other=wctx, listclean=True)
print 'wctx._status=%s' % (str(wctx._status))
print actx2.status(other=wctx, listclean=True)
print 'wctx._status=%s' % (str(wctx._status))

print "== checking workingcommitctx.status:"

wcctx = context.workingcommitctx(repo,
                                 scmutil.status(['bar-m'],
Example #21
    def shelvefunc(ui, repo, message, match, opts):
        parents = repo.dirstate.parents()
        changes = repo.status(match=match)[:5]
        modified, added, removed = changes[:3]
        files = modified + added + removed
        diffopts = mdiff.diffopts(git=True, nodates=True)
        patch_diff = ''.join(
            patch.diff(repo,
                       parents[0],
                       match=match,
                       changes=changes,
                       opts=diffopts))

        fp = cStringIO.StringIO(patch_diff)
        ac = parsepatch(fp)
        fp.close()

        chunks = filterpatch(ui, ac, not opts['all'])
        rc = refilterpatch(ac, chunks)

        # set of files to be processed
        contenders = {}
        for h in chunks:
            try:
                contenders.update(dict.fromkeys(h.files()))
            except AttributeError:
                pass

        # exclude sources of copies that are otherwise untouched
        newfiles = set(f for f in files if f in contenders)

        if not newfiles:
            ui.status(_('no changes to shelve\n'))
            return 0

        backupdir = repo.join('shelve-backups')

        try:
            backups = makebackup(ui, repo, backupdir, newfiles)

            # patch to shelve
            sp = cStringIO.StringIO()
            for c in chunks:
                c.write(sp)

            # patch to apply to shelved files
            fp = cStringIO.StringIO()
            for c in rc:
                # skip files not selected for shelving
                if c.filename() in newfiles:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            try:
                # 3a. apply filtered patch to clean repo (clean)
                opts['no_backup'] = True
                cmdutil.revert(ui, repo, repo['.'], parents,
                               *[os.path.join(repo.root, f) for f in newfiles],
                               **opts)
                for f in added:
                    if f in newfiles:
                        util.unlinkpath(repo.wjoin(f))

                # 3b. apply filtered patch to clean repo (apply)
                if dopatch:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1)
                del fp

                # 3c. apply filtered patch to clean repo (shelve)
                ui.debug("saving patch to shelve\n")
                if opts['append']:
                    sp.write(repo.opener(shelfpath).read())
                sp.seek(0)
                f = repo.opener(shelfpath, "w")
                f.write(sp.getvalue())
                del f, sp
            except:
                ui.warn("shelving failed: %s\n" % sys.exc_info()[1])
                try:
                    # re-schedule remove
                    matchremoved = scmutil.matchfiles(repo, removed)
                    cmdutil.forget(ui, repo, matchremoved, "", True)
                    for f in removed:
                        if f in newfiles and os.path.isfile(repo.wjoin(f)):
                            os.unlink(repo.wjoin(f))
                    # copy back backups
                    for realname, tmpname in backups.iteritems():
                        ui.debug('restoring %r to %r\n' % (tmpname, realname))
                        util.copyfile(tmpname, repo.wjoin(realname))
                    # re-schedule add
                    matchadded = scmutil.matchfiles(repo, added)
                    cmdutil.add(ui, repo, matchadded, False, False, "", True)

                    ui.debug('removing shelve file\n')
                    if os.path.isfile(repo.join(shelfpath)):
                        os.unlink(repo.join(shelfpath))
                except OSError, err:
                    ui.warn("restoring backup failed: %s\n" % err)

            return 0
Example #22
# status at this point:
#   M bar-m
#   A bar-a
#   R bar-r
#   C foo

from mercurial import scmutil

print('== checking workingctx.status:')

wctx = repo[None]
print('wctx._status=%s' % (str(wctx._status)))

print('=== with "pattern match":')
print(
    actx1.status(other=wctx, match=scmutil.matchfiles(repo, ['bar-m', 'foo'])))
print('wctx._status=%s' % (str(wctx._status)))
print(
    actx2.status(other=wctx, match=scmutil.matchfiles(repo, ['bar-m', 'foo'])))
print('wctx._status=%s' % (str(wctx._status)))

print('=== with "always match" and "listclean=True":')
print(actx1.status(other=wctx, listclean=True))
print('wctx._status=%s' % (str(wctx._status)))
print(actx2.status(other=wctx, listclean=True))
print('wctx._status=%s' % (str(wctx._status)))

print("== checking workingcommitctx.status:")

wcctx = context.workingcommitctx(repo,
                                 scmutil.status(['bar-m'], ['bar-a'], [], [],