def backup(self): '''Backup committed changes''' parent = self.ws.parent() if not parent: self.ws.ui.warn('Workspace has no parent, committed changes will ' 'not be backed up\n') return out = self.ws.findoutgoing(parent) if not out: return cg = self.ws.repo.changegroup(out, 'bundle') changegroup.writebundle(cg, self.bu.backupfile('bundle'), 'HG10BZ') outnodes = self._outgoing_nodes(parent) if not outnodes: return fp = None try: try: fp = self.bu.open('nodes', 'w') fp.write('%s\n' % '\n'.join(outnodes)) except EnvironmentError, e: raise util.Abort("couldn't store outgoing nodes: %s" % e) finally: if fp and not fp.closed: fp.close()
def writebundle(self, bases, node):
    """Write a 'shelve' changegroup for *node* (on top of *bases*) to
    self.fname, choosing container type and compression from the repo's
    safe changegroup version."""
    version = changegroup.safeversion(self.repo)
    if version == '01':
        bundletype, compression = 'HG10BZ', None
    else:
        bundletype, compression = 'HG20', 'BZ'
    group = changegroup.changegroupsubset(self.repo, bases, [node],
                                          'shelve', version=version)
    changegroup.writebundle(self.ui, group, self.fname, bundletype,
                            self.vfs, compression=compression)
def writebundle(self, bases, node):
    """Write a 'shelve' changegroup for *node* (on top of *bases*) to
    self.fname; generaldelta repos get an HG20 bundle with cg02."""
    if 'generaldelta' in self.repo.requirements:
        bundletype, version, compression = 'HG20', '02', 'BZ'
    else:
        bundletype, version, compression = 'HG10BZ', '01', None
    group = changegroup.changegroupsubset(self.repo, bases, [node],
                                          'shelve', version=version)
    changegroup.writebundle(self.ui, group, self.fname, bundletype,
                            self.vfs, compression=compression)
def incoming(repo, origin, revs):
    '''return a list of incoming changesets'''
    if revs:
        revs = [origin.lookup(r) for r in revs]
    common, missing, rheads = findcommonincoming(repo, origin, heads=revs,
                                                 force=False)
    if not missing:
        return missing
    if not origin.local():
        # Remote repo: materialize the incoming changesets into a local
        # uncompressed bundle so we can read their changelog directly.
        if not revs and origin.capable('changegroupsubset'):
            revs = rheads
        if revs:
            cg = origin.changegroupsubset(missing, revs, 'incoming')
        else:
            cg = origin.changegroup(missing, 'incoming')
        fname = changegroup.writebundle(cg, None, "HG10UN")
        origin = bundlerepo.bundlerepository(repo.ui, repo.root, fname)
    missing = origin.changelog.nodesbetween(missing, revs)[0]
    # Plain local repos may lack close(); bundle repos provide it.
    if hasattr(origin, 'close'):
        origin.close()
    return missing
def from_objects(cls, repository, revision_id, time, timezone,
                 target_branch, local_target_branch=None,
                 public_branch=None, message=None):
    """Build a merge-directive-like object whose payload is an hg bundle.

    Opens *target_branch*, fetches its tip into *repository*, computes the
    revisions unique to *revision_id*'s side, writes them out as a bundle
    and embeds the bundle bytes in the returned object.

    NOTE(review): local_target_branch is accepted but unused here —
    presumably kept for interface compatibility; confirm against callers.
    """
    submit_branch = _mod_branch.Branch.open(target_branch)
    # Hold a read lock for the whole graph walk so the branch tip cannot
    # move under us; released in the outer finally.
    submit_branch.lock_read()
    try:
        submit_revision_id = submit_branch.last_revision()
        repository.fetch(submit_branch.repository, submit_revision_id)
        graph = repository.get_graph()
        # find_difference returns (only-in-left, only-in-right); [1] is
        # the set unique to revision_id's side, i.e. what must be sent.
        todo = graph.find_difference(submit_revision_id, revision_id)[1]
        cg, revidmap = dchangegroup(
            repository,
            getattr(submit_branch, "mapping", default_mapping),
            todo)
        # writebundle with a None filename returns the temp file path.
        fn = changegroup.writebundle(cg, None, BUNDLE_TYPE)
        f = open(fn, 'r')
        try:
            contents = f.read()
        finally:
            f.close()
    finally:
        submit_branch.unlock()
    return cls(revision_id, None, time, timezone, target_branch,
               contents, None, public_branch, message)
def _bundle(repo, bases, heads, node, suffix, compress=True):
    """create a bundle with the specified revisions as a backup"""
    # Bundle2 output is opt-in: both experimental knobs must be set, and
    # strip-bundle2-version also selects the changegroup packer version.
    usebundle2 = (repo.ui.config('experimental', 'bundle2-exp', True) and
                  repo.ui.config('experimental', 'strip-bundle2-version'))
    if usebundle2:
        cgversion = repo.ui.config('experimental', 'strip-bundle2-version')
        if cgversion not in changegroup.packermap:
            # Unknown packer version: warn and fall back to bundle1/cg01.
            repo.ui.warn(_('unknown strip-bundle2-version value %r; '
                           'should be one of %r\n') %
                         (cgversion, sorted(changegroup.packermap.keys()),))
            cgversion = '01'
            usebundle2 = False
    else:
        cgversion = '01'
    cg = changegroup.changegroupsubset(repo, bases, heads, 'strip',
                                       version=cgversion)
    backupdir = "strip-backup"
    vfs = repo.vfs
    if not vfs.isdir(backupdir):
        vfs.mkdir(backupdir)
    # Include a hash of all the nodes in the filename for uniqueness
    allcommits = repo.set('%ln::%ln', bases, heads)
    allhashes = sorted(c.hex() for c in allcommits)
    totalhash = util.sha1(''.join(allhashes)).hexdigest()
    name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)
    # Container choice: HG20 when bundle2 is enabled, otherwise bundle1
    # compressed (BZ) or uncompressed per the *compress* flag.
    if usebundle2:
        bundletype = "HG20"
    elif compress:
        bundletype = "HG10BZ"
    else:
        bundletype = "HG10UN"
    return changegroup.writebundle(repo.ui, cg, name, bundletype, vfs)
def gincoming(ui, repo, source="default", **opts):
    """show the incoming changesets alongside an ASCII revision graph

    Print the incoming changesets alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """
    check_unsupported_flags(opts)
    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    other = hg.repository(cmdutil.remoteui(repo, opts), source)
    revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
    ui.status(_('comparing with %s\n') % url.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # Best-effort removal of a stale bundle file. FIX: the original
        # used a bare "except:" which also hid TypeError (e.g. an empty
        # --bundle option) and even KeyboardInterrupt; guard the call and
        # catch only the expected filesystem error.
        if opts["bundle"]:
            try:
                os.unlink(opts["bundle"])
            except OSError:
                pass
        ui.status(_("no changes found\n"))
        return

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        chlist = other.changelog.nodesbetween(incoming, revs)[0]
        revdag = graphrevs(other, chlist, opts)
        displayer = show_changeset(ui, other, opts, buffered=True)
        showparents = [ctx.node() for ctx in repo[None].parents()]
        generate(ui, revdag, displayer, showparents, asciiedges)
    finally:
        # Close the (possibly bundle-backed) peer and drop any temporary
        # bundle file we created but were not asked to keep.
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
def to_hg(ifilename, ofilename, compression='bzip2'):
    """
    Given a JSON file produced by this script, at ``ifilename``, convert
    it into an HG10UN file (Mercurial 1.0 uncompressed bundle) at
    ``ofilename``.

    ``compression`` selects the bundle magic: 'none', 'bzip2' (default)
    or 'gzip'.  Raises ValueError for anything else.
    """
    magics = {
        'none': 'HG10UN',
        'bzip2': 'HG10BZ',
        'gzip': 'HG10GZ'
    }
    try:
        magic = magics[compression]
    except KeyError:
        raise ValueError("unsupported compression type '{0}'".format(
            compression
        ))
    if ifilename:
        ifile = open(ifilename, 'r')
    else:
        ifile = sys.stdin
    # FIX: the original leaked the input file handle; close it once the
    # JSON is parsed, but never close sys.stdin.
    try:
        iobj = json.load(ifile, encoding='ascii')
    finally:
        if ifilename:
            ifile.close()
    writebundle(cStringIO.StringIO(pack_groups(iobj)), ofilename, magic)
def to_hg(ifilename, ofilename, compression='bzip2'):
    """
    Given a JSON file produced by this script, at ``ifilename``, convert
    it into an HG10UN file (Mercurial 1.0 uncompressed bundle) at
    ``ofilename``.

    ``compression`` may be 'none', 'bzip2' (default) or 'gzip'; any other
    value raises ValueError.
    """
    magics = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    try:
        magic = magics[compression]
    except KeyError:
        raise ValueError(
            "unsupported compression type '{0}'".format(compression))
    if ifilename:
        ifile = open(ifilename, 'r')
    else:
        ifile = sys.stdin
    # FIX: close the opened input file once parsed (the original leaked
    # it); stdin is deliberately left open.
    try:
        iobj = json.load(ifile, encoding='ascii')
    finally:
        if ifilename:
            ifile.close()
    writebundle(cStringIO.StringIO(pack_groups(iobj)), ofilename, magic)
def _bundle(repo, bases, heads, node, suffix, compress=True):
    """create a bundle with the specified revisions as a backup"""
    changes = repo.changegroupsubset(bases, heads, 'strip')
    # Backups live under .hg/strip-backup; create it on first use.
    backupdir = repo.join("strip-backup")
    if not os.path.isdir(backupdir):
        os.mkdir(backupdir)
    filename = os.path.join(backupdir, "%s-%s.hg" % (short(node), suffix))
    bundletype = compress and "HG10BZ" or "HG10UN"
    return changegroup.writebundle(changes, filename, bundletype)
def _bundle(repo, bases, heads, node, suffix, compress=True):
    """create a bundle with the specified revisions as a backup"""
    # Ensure the vfs-relative backup directory exists before writing.
    vfs = repo.vfs
    backupdir = "strip-backup"
    if not vfs.isdir(backupdir):
        vfs.mkdir(backupdir)
    changes = changegroup.changegroupsubset(repo, bases, heads, 'strip')
    filename = "%s/%s-%s.hg" % (backupdir, short(node), suffix)
    bundletype = compress and "HG10BZ" or "HG10UN"
    return changegroup.writebundle(changes, filename, bundletype, vfs)
def getremotechanges(repo, url):
    """Return (source repo, incoming nodes, bundle path) for *url*.

    For a non-local source the incoming changesets are pulled into an
    uncompressed bundle and a bundle-backed repo is returned instead.
    """
    path = ui.expandpath(url)
    source = hg.repository(ui, path)
    incoming = repo.findincoming(source, force=True)
    if not incoming:
        return (source, None, None)

    bundle = None
    if not source.local():
        group = source.changegroup(incoming, 'incoming')
        bundle = changegroup.writebundle(group, None, 'HG10UN')
        source = bundlerepo.bundlerepository(ui, repo.root, bundle)
    return (source, incoming, bundle)
def getremotechanges(repo, url):
    """Return (source repo, incoming nodes, bundle path) for *url*.

    Non-local sources are materialized into an uncompressed bundle repo,
    preferring changegroupsubset when the peer supports it.
    """
    path = ui.expandpath(url)
    source = hg.repository(ui, path)
    common, incoming, rheads = repo.findcommonincoming(source, force=True)
    if not incoming:
        return (source, None, None)

    bundle = None
    if not source.local():
        if source.capable('changegroupsubset'):
            group = source.changegroupsubset(incoming, rheads, 'incoming')
        else:
            group = source.changegroup(incoming, 'incoming')
        bundle = changegroup.writebundle(group, None, 'HG10UN')
        source = bundlerepo.bundlerepository(ui, repo.root, bundle)
    return (source, incoming, bundle)
def _bundle(repo, bases, heads, node, suffix, compress=True):
    """create a bundle with the specified revisions as a backup"""
    # Bundle2 output requires both experimental knobs to be set; the
    # strip-bundle2-version value also selects the changegroup packer.
    usebundle2 = (repo.ui.config('experimental', 'bundle2-exp') and
                  repo.ui.config('experimental', 'strip-bundle2-version'))
    if usebundle2:
        cgversion = repo.ui.config('experimental', 'strip-bundle2-version')
        if cgversion not in changegroup.packermap:
            # Unknown packer version: warn and fall back to bundle1/cg01.
            repo.ui.warn(
                _('unknown strip-bundle2-version value %r; ' +
                  'should be one of %r\n') % (
                      cgversion,
                      sorted(changegroup.packermap.keys()),
                  ))
            cgversion = '01'
            usebundle2 = False
    else:
        cgversion = '01'
    cg = changegroup.changegroupsubset(repo, bases, heads, 'strip',
                                       version=cgversion)
    backupdir = "strip-backup"
    vfs = repo.vfs
    if not vfs.isdir(backupdir):
        vfs.mkdir(backupdir)
    # Include a hash of all the nodes in the filename for uniqueness
    hexbases = (hex(n) for n in bases)
    hexheads = (hex(n) for n in heads)
    allcommits = repo.set('%ls::%ls', hexbases, hexheads)
    allhashes = sorted(c.hex() for c in allcommits)
    totalhash = util.sha1(''.join(allhashes)).hexdigest()
    name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)
    # HG2Y is the experimental bundle2 magic of this era; otherwise use
    # bundle1, compressed or not per the *compress* flag.
    if usebundle2:
        bundletype = "HG2Y"
    elif compress:
        bundletype = "HG10BZ"
    else:
        bundletype = "HG10UN"
    return changegroup.writebundle(repo.ui, cg, name, bundletype, vfs)
def incoming(wdrepo, masterrepo):
    """Return the changesets present in *masterrepo* but not *wdrepo*.

    Tries the pre-1.6 findincoming API first, then falls back to the
    discovery module on newer Mercurial versions.
    """
    try:
        return wdrepo.findincoming(masterrepo)
    except AttributeError:
        from mercurial import hg, discovery
        revs, checkout = hg.addbranchrevs(wdrepo, masterrepo, ('', []), None)
        common, missing, rheads = discovery.findcommonincoming(
            wdrepo, masterrepo, heads=revs)
        if not masterrepo.local():
            from mercurial import bundlerepo, changegroup
            if revs is None and masterrepo.capable('changegroupsubset'):
                revs = rheads
            if revs is None:
                cg = masterrepo.changegroup(missing, "incoming")
            else:
                cg = masterrepo.changegroupsubset(missing, revs, 'incoming')
            fname = changegroup.writebundle(cg, None, "HG10UN")
            # use the created uncompressed bundlerepo
            masterrepo = bundlerepo.bundlerepository(wdrepo.ui, wdrepo.root,
                                                     fname)
        return masterrepo.changelog.nodesbetween(missing, revs)[0]
def rdiff(ui, repo, url, lrev=None, rrev=None, *pats, **opts):
    """Diff the local revision *lrev* against revision *rrev* of the
    repository at *url*, writing unified diff chunks to *ui*.

    Supports several Mercurial API generations via try/except shims
    (pre-1.3 repository(), pre-1.6 remoteui, pre-1.1 bundle dirstate,
    1.0 patch.diff signature).
    """
    def rui():
        # Prefer the modern remoteui location; fall back for older hg.
        try:
            return hg.remoteui(repo, opts)
        except AttributeError:  # pre 1.6
            return cmdutil.remoteui(repo, opts)
    try:
        other = getpeer(rui(), {}, url)
    except AttributeError:
        # pre-1.3
        other = hg.repository(ui, url)
        cmdutil.setremoteconfig(ui, opts)
    ui.status(_('comparing with %s\n') % url)
    if rrev:
        if capable(other, 'lookup'):
            rrev = other.lookup(rrev)
        else:
            error = _(
                "Other repository doesn't support revision lookup, so a rev cannot be specified."
            )
            raise util.Abort(error)
    incoming = findincomingfn(repo)(other, heads=rrev and [rrev] or [])
    if not incoming:
        # remote is a subset of local; diff against the local repo itself
        if not rrev:
            if capable(other, 'lookup'):
                rrev = other.lookup('tip')
            else:
                raise util.Abort(_('cannot determine remote tip'))
        other = repo
    bundle = None
    try:
        if incoming:
            # create a bundle (uncompressed if other repo is not local)
            if not rrev:
                cg = other.changegroup(incoming, "incoming")
            else:
                if not capable(other, 'changegroupsubset'):
                    raise util.Abort(
                        _("Partial incoming cannot be done because other repository doesn't support changegroupsubset."
                          ))
                cg = other.changegroupsubset(incoming, rrev and [rrev] or [],
                                             'incoming')
            bundle = changegroup.writebundle(cg, '', 'HG10UN')
            other = hg.repository(ui, bundle)
        if lrev:
            lrev = repo.changectx(lrev).node()
        rrev = other.changectx(rrev or 'tip').node()
        if opts['reverse']:
            lrev, rrev = rrev, lrev
        if not lrev:
            # bundle dirstate removed prior to hg 1.1
            lrev = repo.dirstate.parents()[0]
        try:
            try:
                # scmutil.match expects a context not a repo
                m = scmutil.match(repo[None], pats, opts)
            except (ImportError, AttributeError):
                m = cmdutil.match(repo, pats, opts)
            chunks = patch.diff(other, lrev, rrev, match=m,
                                opts=patch.diffopts(ui, opts))
            for chunk in chunks:
                ui.write(chunk)
        except AttributeError:
            # 1.0 compatibility
            fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
            patch.diff(other, lrev, rrev, fns, match=matchfn,
                       opts=patch.diffopts(ui, opts))
    finally:
        # Close the peer/bundle repo and remove the temporary bundle file.
        if hasattr(other, 'close'):
            other.close()
        if bundle:
            os.unlink(bundle)
def rdiff(ui, repo, url, lrev=None, rrev=None, *pats, **opts):
    """Diff local revision *lrev* against revision *rrev* of the
    repository at *url*, writing diff chunks to *ui*.

    Older variant: probes peer features via the .capabilities attribute
    and uses cmdutil.match directly.
    """
    def rui():
        # Prefer the modern remoteui location; fall back for older hg.
        try:
            return hg.remoteui(repo, opts)
        except AttributeError:  # pre 1.6
            return cmdutil.remoteui(repo, opts)
    try:
        other = hg.repository(rui(), url)
    except AttributeError:
        # pre-1.3
        other = hg.repository(ui, url)
        cmdutil.setremoteconfig(ui, opts)
    ui.status(_('comparing with %s\n') % url)
    if rrev:
        if 'lookup' in other.capabilities:
            rrev = other.lookup(rrev)
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
    incoming = findincomingfn(repo)(other, heads=rrev and [rrev] or [])
    if not incoming:
        # remote is a subset of local; diff against the local repo itself
        if not rrev:
            if 'lookup' in other.capabilities:
                rrev = other.lookup('tip')
            else:
                raise util.Abort(_('cannot determine remote tip'))
        other = repo
    bundle = None
    try:
        if incoming:
            # create a bundle (uncompressed if other repo is not local)
            if not rrev:
                cg = other.changegroup(incoming, "incoming")
            else:
                if 'changegroupsubset' not in other.capabilities:
                    raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
                cg = other.changegroupsubset(incoming, rrev and [rrev] or [],
                                             'incoming')
            bundle = changegroup.writebundle(cg, '', 'HG10UN')
            other = hg.repository(ui, bundle)
        if lrev:
            lrev = repo.changectx(lrev).node()
        rrev = other.changectx(rrev or 'tip').node()
        if opts['reverse']:
            lrev, rrev = rrev, lrev
        if not lrev:
            # bundle dirstate removed prior to hg 1.1
            lrev = repo.dirstate.parents()[0]
        try:
            m = cmdutil.match(repo, pats, opts)
            chunks = patch.diff(other, lrev, rrev, match=m,
                                opts=patch.diffopts(ui, opts))
            for chunk in chunks:
                ui.write(chunk)
        except AttributeError:
            # 1.0 compatibility
            fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
            patch.diff(other, lrev, rrev, fns, match=matchfn,
                       opts=patch.diffopts(ui, opts))
    finally:
        # Close the peer/bundle repo and remove the temporary bundle file.
        if hasattr(other, 'close'):
            other.close()
        if bundle:
            os.unlink(bundle)
def writebundle(self, cg):
    """Write changegroup *cg* to self.fname as an uncompressed bundle."""
    bundletype = 'HG10UN'
    changegroup.writebundle(cg, self.fname, bundletype, self.vfs)
def createcmd(ui, repo, pats, opts):
    """subcommand that creates a new shelve"""

    def publicancestors(ctx):
        """Compute the heads of the public ancestors of a commit.
        Much faster than the revset heads(ancestors(ctx) - draft())"""
        seen = set()
        visit = util.deque()
        visit.append(ctx)
        # BFS over mutable ancestors; yield the first public node on each
        # path (a head of the public ancestor set).
        while visit:
            ctx = visit.popleft()
            for parent in ctx.parents():
                rev = parent.rev()
                if rev not in seen:
                    seen.add(rev)
                    if parent.mutable():
                        visit.append(parent)
                    else:
                        yield parent.node()

    wctx = repo[None]
    parents = wctx.parents()
    if len(parents) > 1:
        raise util.Abort(_('cannot shelve while merging'))
    parent = parents[0]

    # we never need the user, so we use a generic user for all shelve
    # operations
    user = '******'
    label = repo._bookmarkcurrent or parent.branch() or 'default'

    # slashes aren't allowed in filenames, therefore we rename it
    origlabel, label = label, label.replace('/', '_')

    def gennames():
        # Candidate shelve names: the label itself, then label-01..label-99.
        yield label
        for i in xrange(1, 100):
            yield '%s-%02d' % (label, i)

    # Populated as a side effect of commitfunc below.
    shelvedfiles = []

    def commitfunc(ui, repo, message, match, opts):
        # check modified, added, removed, deleted only
        for flist in repo.status(match=match)[:4]:
            shelvedfiles.extend(flist)
        # Temporarily disable mq's applied-patch check so shelving works
        # with mq patches applied; restored in the finally.
        hasmq = util.safehasattr(repo, 'mq')
        if hasmq:
            saved, repo.mq.checkapplied = repo.mq.checkapplied, False
        try:
            return repo.commit(message, user, opts.get('date'), match)
        finally:
            if hasmq:
                repo.mq.checkapplied = saved

    if parent.node() != nullid:
        desc = parent.description().split('\n', 1)[0]
    else:
        desc = '(empty repository)'

    if not opts['message']:
        opts['message'] = desc

    name = opts['name']

    wlock = lock = tr = bms = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()

        # Snapshot bookmarks so the whole operation can be rolled back
        # (the transaction below is always aborted in the finally).
        bms = repo._bookmarks.copy()
        # use an uncommitted transaction to generate the bundle to avoid
        # pull races. ensure we don't print the abort message to stderr.
        tr = repo.transaction('commit', report=lambda x: None)

        if name:
            if shelvedfile(repo, name, 'hg').exists():
                raise util.Abort(_("a shelved change named '%s' already exists")
                                 % name)
        else:
            # Pick the first free generated name; exhaustion aborts.
            for n in gennames():
                if not shelvedfile(repo, n, 'hg').exists():
                    name = n
                    break
            else:
                raise util.Abort(_("too many shelved changes named '%s'") %
                                 label)

        # ensure we are not creating a subdirectory or a hidden file
        if '/' in name or '\\' in name:
            raise util.Abort(_('shelved change names may not contain slashes'))
        if name.startswith('.'):
            raise util.Abort(_("shelved change names may not start with '.'"))

        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

        if not node:
            # Nothing committed: report why (missing files vs. no change).
            stat = repo.status(match=scmutil.match(repo[None], pats, opts))
            if stat[3]:
                ui.status(_("nothing changed (%d missing files, see "
                            "'hg status')\n") % len(stat[3]))
            else:
                ui.status(_("nothing changed\n"))
            return 1

        # Keep the temporary commit secret so it never gets exchanged.
        phases.retractboundary(repo, phases.secret, [node])

        fp = shelvedfile(repo, name, 'files').opener('wb')
        fp.write('\0'.join(shelvedfiles))
        # Bundle the shelved commit on top of its public ancestors.
        bases = list(publicancestors(repo[node]))
        cg = repo.changegroupsubset(bases, [node], 'shelve')
        changegroup.writebundle(cg, shelvedfile(repo, name, 'hg').filename(),
                                'HG10UN')
        cmdutil.export(repo, [node],
                       fp=shelvedfile(repo, name, 'patch').opener('wb'),
                       opts=mdiff.diffopts(git=True))

        if ui.formatted():
            desc = util.ellipsis(desc, ui.termwidth())
        ui.status(_('shelved as %s\n') % name)
        # Move the working directory back onto the original parent.
        hg.update(repo, parent.node())
    finally:
        if bms:
            # restore old bookmarks
            repo._bookmarks.update(bms)
            repo._bookmarks.write()
        if tr:
            # The transaction is never committed: aborting it discards the
            # temporary shelve commit while keeping the written bundle.
            tr.abort()
        lockmod.release(lock, wlock)