def pull(self, remote, heads=None, force=False):
    result = super(bookmark_repo, self).pull(remote, heads, force)

    self.ui.debug("checking for updated bookmarks\n")
    rb = remote.listkeys('bookmarks')
    changed = False
    for k in rb.keys():
        if k in self._bookmarks:
            nr, nl = rb[k], self._bookmarks[k]
            if nr in self:
                cr = self[nr]
                cl = self[nl]
                if cl.rev() >= cr.rev():
                    continue
                if cr in cl.descendants():
                    self._bookmarks[k] = cr.node()
                    changed = True
                    self.ui.status(_("updating bookmark %s\n") % k)
                else:
                    self.ui.warn(_("not updating divergent"
                                   " bookmark %s\n") % k)
    if changed:
        write(repo)

    return result
def __init__(self, ui, path, rev=None):
    converter_source.__init__(self, ui, path, rev)
    self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
    self.ignored = set()
    self.saverev = ui.configbool('convert', 'hg.saverev', False)
    try:
        self.repo = hg.repository(self.ui, path)
        # try to provoke an exception if this isn't really a hg
        # repo, but some other bogus compatible-looking url
        if not self.repo.local():
            raise error.RepoError()
    except error.RepoError:
        ui.traceback()
        raise NoRepo(_("%s is not a local Mercurial repository") % path)
    self.lastrev = None
    self.lastctx = None
    self._changescache = None
    self.convertfp = None
    # Restrict converted revisions to startrev descendants
    startnode = ui.config('convert', 'hg.startrev')
    if startnode is not None:
        try:
            startnode = self.repo.lookup(startnode)
        except error.RepoError:
            raise util.Abort(_('%s is not a valid start revision')
                             % startnode)
        startrev = self.repo.changelog.rev(startnode)
        children = {startnode: 1}
        for rev in self.repo.changelog.descendants(startrev):
            children[self.repo.changelog.node(rev)] = 1
        self.keep = children.__contains__
    else:
        self.keep = util.always
def __init__(self, ui, path, rev=None):
    converter_source.__init__(self, ui, path, rev=rev)
    commandline.__init__(self, ui, 'darcs')

    # check for _darcs, ElementTree so that we can easily skip
    # test-convert-darcs if ElementTree is not around
    if not os.path.exists(os.path.join(path, '_darcs')):
        raise NoRepo(_("%s does not look like a darcs repository") % path)

    checktool('darcs')
    version = self.run0('--version').splitlines()[0].strip()
    if version < '2.1':
        raise util.Abort(_('darcs version 2.1 or newer needed (found %r)')
                         % version)

    if ElementTree is None:
        raise util.Abort(_("Python ElementTree module is not available"))

    self.path = os.path.realpath(path)

    self.lastrev = None
    self.changes = {}
    self.parents = {}
    self.tags = {}

    # Check darcs repository format
    format = self.format()
    if format:
        if format in ('darcs-1.0', 'hashed'):
            raise NoRepo(_("%s repository format is unsupported, "
                           "please upgrade") % format)
    else:
        self.ui.warn(_('failed to detect repository format!'))
def pull(oldpull, ui, repo, source="default", **opts):
    # translate bookmark args to rev args for actual pull
    if opts.get('bookmark'):
        # this is an unpleasant hack as pull will do this internally
        source, branches = hg.parseurl(ui.expandpath(source),
                                       opts.get('branch'))
        other = hg.repository(hg.remoteui(repo, opts), source)
        rb = other.listkeys('bookmarks')

        for b in opts['bookmark']:
            if b not in rb:
                raise util.Abort(_('remote bookmark %s not found!') % b)
            opts.setdefault('rev', []).append(b)

    result = oldpull(ui, repo, source, **opts)

    # update specified bookmarks
    if opts.get('bookmark'):
        for b in opts['bookmark']:
            # explicit pull overrides local bookmark if any
            ui.status(_("importing bookmark %s\n") % b)
            repo._bookmarks[b] = repo[rb[b]].node()
        write(repo)

    return result
def __init__(self, ui, path, rev=None):
    super(gnuarch_source, self).__init__(ui, path, rev=rev)

    if not os.path.exists(os.path.join(path, '{arch}')):
        raise NoRepo(_("%s does not look like a GNU Arch repository")
                     % path)

    # Could use checktool, but we want to check for baz or tla.
    self.execmd = None
    if util.find_exe('baz'):
        self.execmd = 'baz'
    else:
        if util.find_exe('tla'):
            self.execmd = 'tla'
        else:
            raise util.Abort(_('cannot find a GNU Arch tool'))

    commandline.__init__(self, ui, self.execmd)

    self.path = os.path.realpath(path)
    self.tmppath = None

    self.treeversion = None
    self.lastrev = None
    self.changes = {}
    self.parents = {}
    self.tags = {}
    self.catlogparser = Parser()
    self.encoding = encoding.encoding
    self.archives = []
def collect(src, ui):
    seplen = len(os.path.sep)
    candidates = []
    live = len(src['tip'].manifest())
    # Your average repository has some files which were deleted before
    # the tip revision. We account for that by assuming that there are
    # 3 tracked files for every 2 live files as of the tip version of
    # the repository.
    #
    # mozilla-central as of 2010-06-10 had a ratio of just over 7:5.
    total = live * 3 // 2
    src = src.store.path
    pos = 0
    ui.status(_("tip has %d files, estimated total number of files: %s\n")
              % (live, total))
    for dirpath, dirnames, filenames in os.walk(src):
        dirnames.sort()
        relpath = dirpath[len(src) + seplen:]
        for filename in sorted(filenames):
            if not filename[-2:] in ('.d', '.i'):
                continue
            st = os.stat(os.path.join(dirpath, filename))
            if not stat.S_ISREG(st.st_mode):
                continue
            pos += 1
            candidates.append((os.path.join(relpath, filename), st))
            ui.progress(_('collecting'), pos, filename, _('files'), total)

    ui.progress(_('collecting'), None)
    ui.status(_('collected %d candidate storage files\n') % len(candidates))
    return candidates
def prune(candidates, src, dst, ui):
    def linkfilter(src, dst, st):
        try:
            ts = os.stat(dst)
        except OSError:
            # Destination doesn't have this file?
            return False
        if util.samefile(src, dst):
            return False
        if not util.samedevice(src, dst):
            # No point in continuing
            raise util.Abort(
                _('source and destination are on different devices'))
        if st.st_size != ts.st_size:
            return False
        return st

    targets = []
    total = len(candidates)
    pos = 0
    for fn, st in candidates:
        pos += 1
        srcpath = os.path.join(src, fn)
        tgt = os.path.join(dst, fn)
        ts = linkfilter(srcpath, tgt, st)
        if not ts:
            ui.debug(_('not linkable: %s\n') % fn)
            continue
        targets.append((fn, ts.st_size))
        ui.progress(_('pruning'), pos, fn, _('files'), total)

    ui.progress(_('pruning'), None)
    ui.status(_('pruned down to %d probably relinkable files\n')
              % len(targets))
    return targets
def catfile(ui, repo, type=None, r=None, **opts):
    """cat a specific revision"""
    # in stdin mode, every line except the commit is prefixed with two
    # spaces. This way our caller can find the commit without magic
    # strings
    #
    prefix = ""
    if opts['stdin']:
        try:
            (type, r) = raw_input().split(' ')
            prefix = " "
        except EOFError:
            return
    else:
        if not type or not r:
            ui.warn(_("cat-file: type or revision not supplied\n"))
            commands.help_(ui, 'cat-file')

    while r:
        if type != "commit":
            ui.warn(_("aborting hg cat-file only understands commits\n"))
            return 1
        n = repo.lookup(r)
        catcommit(ui, repo, n, prefix)
        if opts['stdin']:
            try:
                (type, r) = raw_input().split(' ')
            except EOFError:
                break
        else:
            break
def readauthormap(self, authorfile):
    afile = open(authorfile, 'r')
    for line in afile:

        line = line.strip()
        if not line or line.startswith('#'):
            continue

        try:
            srcauthor, dstauthor = line.split('=', 1)
        except ValueError:
            msg = _('Ignoring bad line in author map file %s: %s\n')
            self.ui.warn(msg % (authorfile, line.rstrip()))
            continue

        srcauthor = srcauthor.strip()
        dstauthor = dstauthor.strip()
        if self.authors.get(srcauthor) in (None, dstauthor):
            msg = _('mapping author %s to %s\n')
            self.ui.debug(msg % (srcauthor, dstauthor))
            self.authors[srcauthor] = dstauthor
            continue

        m = _('overriding mapping for author %s, was %s, will be %s\n')
        self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))

    afile.close()
def recover(self, repo):
    '''commit working directory using journal metadata'''
    node, user, date, message, parents = self.readlog()
    merge = len(parents) == 2

    if not user or not date or not message or not parents[0]:
        raise util.Abort(_('transplant log file is corrupt'))

    extra = {'transplant_source': node}
    wlock = repo.wlock()
    try:
        p1, p2 = repo.dirstate.parents()
        if p1 != parents[0]:
            raise util.Abort(_('working dir not at transplant parent %s')
                             % revlog.hex(parents[0]))
        if merge:
            repo.dirstate.setparents(p1, parents[1])
        n = repo.commit(message, user, date, extra=extra)
        if not n:
            raise util.Abort(_('commit failed'))
        if not merge:
            self.transplants.set(n, node)
        self.unlog()

        return n, node
    finally:
        wlock.release()
def decorated_function(self, *args):
    try:
        return function(self, *args)
    except (OSError, socket.error), err:
        autostart = self.ui.configbool('inotify', 'autostart', True)

        if err.args[0] == errno.ECONNREFUSED:
            self.ui.warn(_('inotify-client: found dead inotify server '
                           'socket; removing it\n'))
            os.unlink(os.path.join(self.root, '.hg', 'inotify.sock'))
        if err.args[0] in (errno.ECONNREFUSED, errno.ENOENT) and autostart:
            try:
                try:
                    server.start(self.ui, self.dirstate, self.root,
                                 dict(daemon=True, daemon_pipefds=''))
                except server.AlreadyStartedException, inst:
                    # another process may have started its own
                    # inotify server while this one was starting.
                    self.ui.debug(str(inst))
            except Exception, inst:
                self.ui.warn(_('inotify-client: could not start inotify '
                               'server: %s\n') % inst)
            else:
                try:
                    return function(self, *args)
                except socket.error, err:
                    self.ui.warn(_('inotify-client: could not talk to new '
                                   'inotify server: %s\n') % err.args[-1])
def getkeys(ui, repo, mygpg, sigdata, context):
    """get the keys that signed the data"""
    fn, ln = context
    node, version, sig = sigdata
    prefix = "%s:%d" % (fn, ln)
    node = hgnode.bin(node)

    data = node2txt(repo, node, version)
    sig = binascii.a2b_base64(sig)
    err, keys = mygpg.verify(data, sig)
    if err:
        ui.warn("%s:%d %s\n" % (fn, ln, err))
        return None

    validkeys = []
    # warn for expired key and/or sigs
    for key in keys:
        if key[0] == "BADSIG":
            ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
            continue
        if key[0] == "EXPSIG":
            ui.write(_("%s Note: Signature has expired"
                       " (signed by: \"%s\")\n") % (prefix, key[2]))
        elif key[0] == "EXPKEYSIG":
            ui.write(_("%s Note: This key has expired"
                       " (signed by: \"%s\")\n") % (prefix, key[2]))
        validkeys.append((key[1], key[2], key[3]))
    return validkeys
def notify(self, ids, committer):
    '''tell bugzilla to send mail.'''

    self.ui.status(_('telling bugzilla to send mail:\n'))
    (user, userid) = self.get_bugzilla_user(committer)
    for id in ids:
        self.ui.status(_(' bug %s\n') % id)
        cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
        bzdir = self.ui.config('bugzilla', 'bzdir',
                               '/var/www/html/bugzilla')
        try:
            # Backwards-compatible with old notify string, which
            # took one string. This will throw with a new format
            # string.
            cmd = cmdfmt % id
        except TypeError:
            cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
        self.ui.note(_('running notify command %s\n') % cmd)
        fp = util.popen('(%s) 2>&1' % cmd)
        out = fp.read()
        ret = fp.close()
        if ret:
            self.ui.warn(out)
            raise util.Abort(_('bugzilla notify command %s') %
                             util.explain_exit(ret)[0])
    self.ui.status(_('done\n'))
def run(self, *args, **kwargs):
    '''run a query.'''
    self.ui.note(_('query: %s %s\n') % (args, kwargs))
    try:
        self.cursor.execute(*args, **kwargs)
    except MySQLdb.MySQLError:
        self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
        raise
def _connect(self):
    root = self.cvsroot
    conntype = None
    user, host = None, None
    cmd = ['cvs', 'server']

    self.ui.status(_("connecting to %s\n") % root)

    if root.startswith(":pserver:"):
        root = root[9:]
        m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
                     root)
        if m:
            conntype = "pserver"
            user, passw, serv, port, root = m.groups()
            if not user:
                user = "******"
            if not port:
                port = 2401
            else:
                port = int(port)
            format0 = ":pserver:%s@%s:%s" % (user, serv, root)
            format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)

            if not passw:
                passw = "A"
                cvspass = os.path.expanduser("~/.cvspass")
                try:
                    pf = open(cvspass)
                    for line in pf.read().splitlines():
                        part1, part2 = line.split(' ', 1)
                        if part1 == '/1':
                            # /1 :pserver:[email protected]:2401/cvsroot/foo Ah<Z
                            part1, part2 = part2.split(' ', 1)
                            format = format1
                        else:
                            # :pserver:[email protected]:/cvsroot/foo Ah<Z
                            format = format0
                        if part1 == format:
                            passw = part2
                            break
                    pf.close()
                except IOError, inst:
                    if inst.errno != errno.ENOENT:
                        if not getattr(inst, 'filename', None):
                            inst.filename = cvspass
                        raise

            sck = socket.socket()
            sck.connect((serv, port))
            sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
                                "END AUTH REQUEST", ""]))
            if sck.recv(128) != "I LOVE YOU\n":
                raise util.Abort(_("CVS pserver authentication failed"))

            self.writep = self.readp = sck.makefile('r+')
def rawentries(subdir="", **map):

    descend = self.ui.configbool('web', 'descend', True)
    for name, path in self.repos:

        if not name.startswith(subdir):
            continue
        name = name[len(subdir):]
        if not descend and '/' in name:
            continue

        u = self.ui.copy()
        try:
            u.readconfig(os.path.join(path, '.hg', 'hgrc'))
        except Exception, e:
            u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
            continue
        def get(section, name, default=None):
            return u.config(section, name, default, untrusted=True)

        if u.configbool("web", "hidden", untrusted=True):
            continue

        if not self.read_allowed(u, req):
            continue

        parts = [name]
        if 'PATH_INFO' in req.env:
            parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
        if req.env['SCRIPT_NAME']:
            parts.insert(0, req.env['SCRIPT_NAME'])
        url = re.sub(r'/+', '/', '/'.join(parts) + '/')

        # update time with local timezone
        try:
            r = hg.repository(self.ui, path)
        except error.RepoError:
            u.warn(_('error accessing repository at %s\n') % path)
            continue
        try:
            d = (get_mtime(r.spath), util.makedate()[1])
        except OSError:
            continue

        contact = get_contact(get)
        description = get("web", "description", "")
        name = get("web", "name", name)
        row = dict(contact=contact or "unknown",
                   contact_sort=contact.upper() or "unknown",
                   name=name,
                   name_sort=name,
                   url=url,
                   description=description or "unknown",
                   description_sort=description.upper() or "unknown",
                   lastchange=d,
                   lastchange_sort=d[1]-d[0],
                   archives=archivelist(u, "tip", url))
        yield row
def extsetup(ui):
    commands.globalopts.append(
        ('', 'color', 'auto',
         # i18n: 'always', 'auto', and 'never' are keywords and should
         # not be translated
         _("when to colorize (boolean, always, auto, or never)"),
         _('TYPE')))
def convertsink(ui, path, type):
    if type and type not in [s[0] for s in sink_converters]:
        raise util.Abort(_('%s: invalid destination repository type') % type)
    for name, sink in sink_converters:
        try:
            if not type or name == type:
                return sink(ui, path)
        except NoRepo, inst:
            ui.note(_("convert: %s\n") % inst)
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    if hooktype not in ['pretxnchangegroup', 'pretxncommit']:
        raise util.Abort(_('config error - hook type "%s" cannot stop '
                           'incoming changesets nor commits') % hooktype)
    if (hooktype == 'pretxnchangegroup' and
        source not in ui.config('acl', 'sources', 'serve').split()):
        ui.debug('acl: changes have source "%s" - skipping\n' % source)
        return

    user = None
    if source == 'serve' and 'url' in kwargs:
        url = kwargs['url'].split(':')
        if url[0] == 'remote' and url[1].startswith('http'):
            user = urllib.unquote(url[3])

    if user is None:
        user = getpass.getuser()

    cfg = ui.config('acl', 'config')
    if cfg:
        ui.readconfig(cfg, sections=['acl.groups', 'acl.allow.branches',
                                     'acl.deny.branches', 'acl.allow',
                                     'acl.deny'])

    allowbranches = buildmatch(ui, None, user, 'acl.allow.branches')
    denybranches = buildmatch(ui, None, user, 'acl.deny.branches')
    allow = buildmatch(ui, repo, user, 'acl.allow')
    deny = buildmatch(ui, repo, user, 'acl.deny')

    for rev in xrange(repo[node], len(repo)):
        ctx = repo[rev]
        branch = ctx.branch()
        if denybranches and denybranches(branch):
            raise util.Abort(_('acl: user "%s" denied on branch "%s"'
                               ' (changeset "%s")')
                             % (user, branch, ctx))
        if allowbranches and not allowbranches(branch):
            raise util.Abort(_('acl: user "%s" not allowed on branch "%s"'
                               ' (changeset "%s")')
                             % (user, branch, ctx))
        ui.debug('acl: branch access granted: "%s" on branch "%s"\n'
                 % (ctx, branch))

        for f in ctx.files():
            if deny and deny(f):
                ui.debug('acl: user %s denied on %s\n' % (user, f))
                raise util.Abort(_('acl: access denied for changeset %s')
                                 % ctx)
            if allow and not allow(f):
                ui.debug('acl: user %s not allowed on %s\n' % (user, f))
                raise util.Abort(_('acl: access denied for changeset %s')
                                 % ctx)
        ui.debug('acl: allowing changeset %s\n' % ctx)
def _status(ui, repo, kwt, *pats, **opts):
    '''Bails out if [keyword] configuration is not active.
    Returns status of working directory.'''
    if kwt:
        return repo.status(match=cmdutil.match(repo, pats, opts), clean=True,
                           unknown=opts.get('unknown') or opts.get('all'))
    if ui.configitems('keyword'):
        raise util.Abort(_('[keyword] patterns cannot match'))
    raise util.Abort(_('no [keyword] patterns configured'))
def recordfunc(ui, repo, message, match, opts):
    """This is the generic record driver.

    Its job is to interactively filter local changes, and accordingly
    prepare the working directory into a state where the job can be
    delegated to a non-interactive commit command such as 'commit' or
    'qrefresh'.

    After the actual job is done by the non-interactive command, the
    working directory state is restored to the original.

    In the end we'll record the interesting changes, and everything else
    will be left in place, so the user can continue his work.
    """

    merge = len(repo[None].parents()) > 1
    if merge:
        raise util.Abort(_('cannot partially commit a merge '
                           '(use hg commit instead)'))

    changes = repo.status(match=match)[:3]
    diffopts = mdiff.diffopts(git=True, nodates=True)
    chunks = patch.diff(repo, changes=changes, opts=diffopts)
    fp = cStringIO.StringIO()
    fp.write(''.join(chunks))
    fp.seek(0)

    # 1. filter patch, so we have the subset of it we intend to apply
    chunks = filterpatch(ui, parsepatch(fp))
    del fp

    contenders = set()
    for h in chunks:
        try:
            contenders.update(set(h.files()))
        except AttributeError:
            pass

    changed = changes[0] + changes[1] + changes[2]
    newfiles = [f for f in changed if f in contenders]
    if not newfiles:
        ui.status(_('no changes to record\n'))
        return 0

    modified = set(changes[0])

    # 2. backup changed files, so we can restore them in the end
    backups = {}
    backupdir = repo.join('record-backups')
    try:
        os.mkdir(backupdir)
    except OSError, err:
        if err.errno != errno.EEXIST:
            raise
def getfile(self, name, rev):

    def chunkedread(fp, count):
        # file-objects returned by socket.makefile() do not handle
        # large read() requests very well.
        chunksize = 65536
        output = StringIO()
        while count > 0:
            data = fp.read(min(count, chunksize))
            if not data:
                raise util.Abort(_("%d bytes missing from remote file")
                                 % count)
            count -= len(data)
            output.write(data)
        return output.getvalue()

    self._parse()
    if rev.endswith("(DEAD)"):
        raise IOError

    args = ("-N -P -kk -r %s --" % rev).split()
    args.append(self.cvsrepo + '/' + name)
    for x in args:
        self.writep.write("Argument %s\n" % x)
    self.writep.write("Directory .\n%s\nco\n" % self.realroot)
    self.writep.flush()

    data = ""
    mode = None
    while 1:
        line = self.readp.readline()
        if line.startswith("Created ") or line.startswith("Updated "):
            self.readp.readline() # path
            self.readp.readline() # entries
            mode = self.readp.readline()[:-1]
            count = int(self.readp.readline()[:-1])
            data = chunkedread(self.readp, count)
        elif line.startswith(" "):
            data += line[1:]
        elif line.startswith("M "):
            pass
        elif line.startswith("Mbinary "):
            count = int(self.readp.readline()[:-1])
            data = chunkedread(self.readp, count)
        else:
            if line == "ok\n":
                if mode is None:
                    raise util.Abort(_('malformed response from CVS'))
                return (data, "x" in mode and "x" or "")
            elif line.startswith("E "):
                self.ui.warn(_("cvs server: %s\n") % line[2:])
            elif line.startswith("Remove"):
                self.readp.readline()
            else:
                raise util.Abort(_("unknown CVS response: %s") % line)
def remove(remove_func, name):
    if act:
        try:
            remove_func(repo.wjoin(name))
        except OSError:
            m = _('%s cannot be removed') % name
            if opts['abort_on_err']:
                raise util.Abort(m)
            ui.warn(_('warning: %s\n') % m)
    else:
        ui.write('%s%s' % (name, eol))
def prompt(ui, prompt, default=None, rest=':'):
    if not ui.interactive() and default is None:
        raise util.Abort(_("%s Please enter a valid value") % (prompt + rest))
    if default:
        prompt += ' [%s]' % default
    prompt += rest
    while True:
        r = ui.prompt(prompt, default=default)
        if r:
            return r
        if default is not None:
            return default
        ui.warn(_('Please enter a valid value.\n'))
def getcfgpath(name, rev):
    cfgpath = self.ui.config('convert', 'svn.' + name)
    if cfgpath is not None and cfgpath.strip() == '':
        return None
    path = (cfgpath or name).strip('/')
    if not self.exists(path, rev):
        if cfgpath:
            raise util.Abort(_('expected %s to be at %r, but not found')
                             % (name, path))
        return None
    self.ui.note(_('found %s at %r\n') % (name, path))
    return path
def __init__(self, ui, path):

    converter_sink.__init__(self, ui, path)
    commandline.__init__(self, ui, 'svn')
    self.delete = []
    self.setexec = []
    self.delexec = []
    self.copies = []
    self.wc = None
    self.cwd = os.getcwd()

    path = os.path.realpath(path)

    created = False
    if os.path.isfile(os.path.join(path, '.svn', 'entries')):
        self.wc = path
        self.run0('update')
    else:
        wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')

        if os.path.isdir(os.path.dirname(path)):
            if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                ui.status(_('initializing svn repository %r\n') %
                          os.path.basename(path))
                commandline(ui, 'svnadmin').run0('create', path)
                created = path
            path = util.normpath(path)
            if not path.startswith('/'):
                path = '/' + path
            path = 'file://' + path

        ui.status(_('initializing svn working copy %r\n')
                  % os.path.basename(wcpath))
        self.run0('checkout', path, wcpath)

        self.wc = wcpath
    self.opener = util.opener(self.wc)
    self.wopener = util.opener(self.wc)
    self.childmap = mapfile(ui, self.join('hg-childmap'))
    self.is_exec = util.checkexec(self.wc) and util.is_exec or None

    if created:
        hook = os.path.join(created, 'hooks', 'pre-revprop-change')
        fp = open(hook, 'w')
        fp.write(pre_revprop_change)
        fp.close()
        util.set_flags(hook, False, True)

    xport = transport.SvnRaTransport(url=geturl(path))
    self.uuid = svn.ra.get_uuid(xport.ra)
def diffbookmarks(ui, repo, remote):
    ui.status(_("searching for changed bookmarks\n"))

    lmarks = repo.listkeys('bookmarks')
    rmarks = remote.listkeys('bookmarks')

    diff = sorted(set(rmarks) - set(lmarks))
    for k in diff:
        ui.write(" %-25s %s\n" % (k, rmarks[k][:12]))

    if len(diff) <= 0:
        ui.status(_("no changed bookmarks found\n"))
        return 1
    return 0
def getoutgoing(dest, revs):
    '''Return the revisions present locally but not in dest'''
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest)
    revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    other = hg.repository(hg.remoteui(repo, opts), dest)
    ui.status(_('comparing with %s\n') % url.hidepassword(dest))
    o = discovery.findoutgoing(repo, other)
    if not o:
        ui.status(_("no changes found\n"))
        return []
    o = repo.changelog.nodesbetween(o, revs)[0]
    return [str(repo.changelog.rev(r)) for r in o]
def handle_pollevents(self, events):
    if self.ui.debugflag:
        self.ui.note(_('%s readable: %d bytes\n') %
                     (self.event_time(), self.threshold.readable()))
    if not self.threshold():
        if self.registered:
            if self.ui.debugflag:
                self.ui.note(_('%s below threshold - unhooking\n') %
                             (self.event_time()))
            self.unregister()
            self.timeout = 250
    else:
        self.read_events()
def _kwfwrite(ui, repo, expand, *pats, **opts):
    '''Selects files and passes them to kwtemplater.overwrite.'''
    wctx = repo[None]
    if len(wctx.parents()) > 1:
        raise util.Abort(_('outstanding uncommitted merge'))
    kwt = kwtools['templater']
    wlock = repo.wlock()
    try:
        status = _status(ui, repo, kwt, *pats, **opts)
        modified, added, removed, deleted, unknown, ignored, clean = status
        if modified or added or removed or deleted:
            raise util.Abort(_('outstanding uncommitted changes'))
        kwt.overwrite(wctx, clean, True, expand)
    finally:
        wlock.release()