def validate_hg_url(self, url):
    """Form validator: ensure *url* points at a reachable Mercurial repo."""
    message = _("Invalid Hg URL: '%s'") % url
    source, _branches = hg.parseurl(url)
    try:
        hg.repository(ui.ui(), source)
    except RepoError:
        raise forms.ValidationError(message)
def overview(ui, repo, source=None, **opts):
    '''provides a general overview of your repository state

    This command combines the output of the hg incoming, hg outgoing,
    hg status, and hg id commands into an easily human-readable
    explanation of the entire state of your current working repository.
    '''
    if not repo:
        return
    # resolve source/destination paths; both default to the same remote
    originurl = ui.expandpath(source or 'default')
    targeturl = ui.expandpath(source or 'default-push', source or 'default')
    origin, hashbranch = parseurl(originurl)
    origin = hg.repository(remoteui(repo, opts), origin)
    target, hashbranch = parseurl(targeturl)
    target = hg.repository(remoteui(repo, opts), target)
    if originurl == targeturl:
        ui.status(_('parent repository: %s\n') % url.hidepassword(originurl))
    else:
        ui.status(_('source repository: %s\n') % url.hidepassword(getattr(origin, 'root', origin.url())))
        ui.status(_('destination repository: %s\n') % url.hidepassword(getattr(target, 'root', target.url())))
    # compute in/out silently; pushbuffer swallows the commands' own output
    ui.pushbuffer()
    out = outgoing(repo, target)
    inc = incoming(repo, origin, filter(bool, [hashbranch]))
    ui.popbuffer()
    changed = any(repo.status())
    if changed:
        status = _('uncommitted changes')
    else:
        status = _('working copy up-to-date')
    # grab heads
    heads = repo.branchheads(None, closed=False)
    if len(heads) > 1:
        merge = 'merge required'
    else:
        merge = ''
    # two-line ASCII diagram: counts of outgoing/incoming changesets
    ui.status(_('| Remote | << %s | Local | %s\n') % (str(len(out)).center(5), merge))
    ui.status(_('| Repository | %s >> | Repository | %s\n') % (str(len(inc)).center(5), status))
    if opts['detail']:
        if len(out) > 0:
            ui.status(_('\noutgoing changes:\n'))
            for rev in out:
                # first line of each changeset description
                ui.status('%s %s\n' % (repo[rev], repo[rev].description().strip().split('\n')[0]))
        if len(inc) > 0:
            ui.status(_('\nincoming changes:\n'))
            for rev in inc:
                ui.status('%s %s\n' % (repo[rev], repo[rev].description().strip().split('\n')[0]))
        if changed:
            ui.status(_('\nlocal files:\n'))
            # capture `hg status` output and re-emit it indented
            ui.pushbuffer()
            commands.status(ui, repo, '', **opts)
            status = ui.popbuffer()
            for l in status.splitlines():
                print ' %s' % l
def launch(root='', files=[], cwd='', main=True):
    """Open the TortoiseHg commit dialog (or an external commit tool) for
    the repository rooted at *root*.

    Returns the GCommit dialog, or None when an external tool was used.
    NOTE(review): *files* is a mutable default argument — harmless only if
    callers never mutate it; confirm before relying on that.
    """
    u = ui.ui()
    u.updateopts(debug=False, traceback=False)
    repo = hg.repository(u, path=root)
    # move cwd to repo root if repo is merged, so we can show
    # all the changed files
    if len(repo.workingctx().parents()) > 1 and repo.root != cwd:
        cwd = repo.root
        repo = hg.repository(u, path=cwd)
        files = [cwd]
    # a configured external commit tool takes precedence over the dialog
    ct = repo.ui.config('tortoisehg', 'commit', 'internal')
    if ct not in ['', 'internal']:
        from hglib import thgdispatch
        args = ['--repository', root, ct]
        try:
            ret = thgdispatch(repo.ui, args=args)
        except SystemExit:
            pass
        return None
    # option set handed to the GCommit dialog (mirrors `hg commit` flags)
    cmdoptions = {
        'user':'', 'date':'',
        'modified':True, 'added':True, 'removed':True, 'deleted':True,
        'unknown':False, 'ignored':False,
        'exclude':[], 'include':[],
        'check': False, 'git':False, 'logfile':'', 'addremove':False,
    }
    dialog = GCommit(u, repo, cwd, files, cmdoptions, main)
    dialog.display()
    return dialog
def __init__(self, ui, path):
    """Convert sink that writes into a local Mercurial repository at *path*.

    Opens an existing repo when *path* is a non-empty directory, otherwise
    creates a fresh one. Raises NoRepo when *path* cannot be used.
    """
    common.converter_sink.__init__(self, ui, path)
    self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
    self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
    self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
    self.lastbranch = None
    if os.path.isdir(path) and len(os.listdir(path)) > 0:
        # non-empty directory: must already be a local hg repo
        try:
            self.repo = hg.repository(self.ui, path)
            if not self.repo.local():
                raise NoRepo(_('%s is not a local Mercurial repository') % path)
        except error.RepoError as err:
            ui.traceback()
            raise NoRepo(err.args[0])
    else:
        # empty/missing: initialize a new destination repository
        try:
            ui.status(_('initializing destination %s repository\n') % path)
            self.repo = hg.repository(self.ui, path, create=True)
            if not self.repo.local():
                raise NoRepo(_('%s is not a local Mercurial repository') % path)
            # remember that we created it (for cleanup by the caller)
            self.created.append(path)
        except error.RepoError:
            ui.traceback()
            raise NoRepo(_("could not create hg repository %s as sink") % path)
    self.lock = None
    self.wlock = None
    self.filemapmode = False
    self.subrevmaps = {}
def get_repo(url, alias):
    """Return a local hg repository tracking *url*, pulling latest changes.

    For remote URLs a shared clone is kept under the git dir so multiple
    remotes reuse one store. Sets the module-level ``peer`` as a side effect.
    NOTE(review): relies on module globals ``dirname`` and ``gitdir`` being
    set by the caller — confirm against the surrounding module.
    """
    global peer
    myui = ui.ui()
    myui.setconfig('ui', 'interactive', 'off')
    myui.fout = sys.stderr
    if get_config_bool('remote-hg.insecure'):
        # explicitly disable certificate verification
        myui.setconfig('web', 'cacerts', '')
    extensions.loadall(myui)
    if hg.islocal(url) and not os.environ.get('GIT_REMOTE_HG_TEST_REMOTE'):
        # local repo: use it directly, no shared clone needed
        repo = hg.repository(myui, url)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
    else:
        shared_path = os.path.join(gitdir, 'hg')
        # check and upgrade old organization
        hg_path = os.path.join(shared_path, '.hg')
        if os.path.exists(shared_path) and not os.path.exists(hg_path):
            repos = os.listdir(shared_path)
            for x in repos:
                local_hg = os.path.join(shared_path, x, 'clone', '.hg')
                if not os.path.exists(local_hg):
                    continue
                # first store found becomes the shared one; the rest are dropped
                if not os.path.exists(hg_path):
                    shutil.move(local_hg, hg_path)
                shutil.rmtree(os.path.join(shared_path, x, 'clone'))
        # setup shared repo (if not there)
        try:
            hg.peer(myui, {}, shared_path, create=True)
        except error.RepoError:
            pass
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        local_path = os.path.join(dirname, 'clone')
        if not os.path.exists(local_path):
            hg.share(myui, shared_path, local_path, update=False)
        else:
            # make sure the shared path is always up-to-date
            util.writefile(os.path.join(local_path, '.hg', 'sharedpath'), hg_path)
        repo = hg.repository(myui, local_path)
    try:
        peer = hg.peer(myui, {}, url)
    except:
        die('Repository error')
    # always sync before returning
    repo.pull(peer, heads=None, force=True)
    updatebookmarks(repo, peer)
    return repo
def create_queue(repo, qname):
    """Register patch queue *qname* in patches.queues and initialize its repo."""
    if has_queue(repo, qname):
        return
    # the default queue keeps its name; others drop the 'patches.' prefix
    if qname == 'patches':
        name = 'patches'
    else:
        name = qname[8:]
    fh = repo.opener('patches.queues', 'a')
    fh.write('%s\n' % (name,))
    fh.close()
    queue_path = os.path.join(repo.path, qname)
    if not os.path.exists(queue_path):
        hg.repository(repo.ui, queue_path, create=True)
def get_repo_for_repository(app, repository=None, repo_path=None):
    """Open a Mercurial repo for either a repository record or a raw path.

    *repository* (a model object) wins over *repo_path*; returns None when
    neither is given.
    """
    # Import from mercurial here to let Galaxy start under Python 3
    from mercurial import (
        hg,
        ui
    )
    if repository is not None:
        repo_path = repository.repo_path(app)
    if repo_path is not None:
        return hg.repository(ui.ui(), repo_path)
def test_diff_base_against_clone(self):
    """Test that the right error is raised on trying to do a diff
    across a different divergant clone"""
    ui = mock_ui()
    orig = os.path.join(settings.REPOSITORY_BASE, "orig")
    clone = os.path.join(settings.REPOSITORY_BASE, "clone")
    hgcommands.init(ui, orig)
    hgorig = repository(ui, orig)
    # seed the base repo with one file and commit it
    (
        open(hgorig.pathto("file.dtd"), "w").write(
            """ <!ENTITY old "content we will delete"> <!ENTITY mod "this has stuff to keep and delete"> """
        )
    )
    hgcommands.addremove(ui, hgorig)
    hgcommands.commit(ui, hgorig, user="******", message="initial commit")
    assert len(hgorig) == 1  # 1 commit
    # set up a second repo called 'clone'
    hgcommands.clone(ui, orig, clone)
    hgclone = repository(ui, clone)
    # new commit on base
    (
        open(hgorig.pathto("file.dtd"), "w").write(
            """ <!ENTITY mod "this has stuff to keep and add"> <!ENTITY new "this has stuff that is new"> """
        )
    )
    hgcommands.commit(ui, hgorig, user="******", message="second commit on base")
    assert len(hgorig) == 2  # 2 commits
    rev_from = hgorig[1].hex()
    # different commit on clone -- the two repos now diverge at rev 1
    (
        open(hgclone.pathto("file.dtd"), "w").write(
            """ <!ENTITY mod "this has stuff to keep and change"> <!ENTITY new_in_clone "this has stuff that is different from base"> """
        )
    )
    hgcommands.commit(ui, hgclone, user="******", message="a different commit on clone")
    rev_to = hgclone[1].hex()
    Repository.objects.create(name="orig", url="http://localhost:8001/orig/")
    Repository.objects.create(name="clone", url="http://localhost:8001/clone/")
    url = reverse("pushes.views.diff")
    # right now, we can't diff between repos, this might change!
    self.assertRaises(RepoError, self.client.get,
                      url, {"repo": "clone",
                            "from": rev_from[:12],
                            "to": rev_to[:12]})
def _local_repo(self):
    """Return (and cache) the hg repository at ``self.local``.

    Creates the directory and repository on first use; a RepoError during
    creation (e.g. the repo already exists) falls through to a plain open.
    """
    if self.key not in revision._state.repositories:
        if not os.path.exists(self.local):
            try:
                os.makedirs(self.local)
                revision._state.repositories[self.key] = hg.repository(self._ui, self.local, create=True)
            except error.RepoError:
                # creation raced with someone else -- open below instead
                pass
        # still uncached: the path existed already, open without create
        if self.key not in revision._state.repositories:
            revision._state.repositories[self.key] = hg.repository(self._ui, self.local)
    return revision._state.repositories[self.key]
def hgproto_init(repo, proto):
    """An hg protocol command handler that creates a new repository.

    This gets bound to the 'init' command."""
    virtual = proto.req.env.get("PATH_INFO", "").strip("/")
    # map configured path aliases to their real locations
    paths = dict(repo.ui.configitems("paths"))
    local = local_path_for_repo(virtual, paths)
    hg.repository(repo.ui, path=local, create=True)
def init(self):
    """Create a new repository at the path entered in the dialog.

    Returns False on failure; asks for confirmation before creating a
    directory more than one level deep.
    """
    dest = self.getPath()
    if dest == '':
        qtlib.ErrorMsgBox(_('Error executing init'),
                          _('Destination path is empty'),
                          _('Please enter the directory path'))
        self.dest_edit.setFocus()
        return False
    dest = os.path.normpath(dest)
    self.dest_edit.setText(hglib.tounicode(dest))
    udest = self.dest_edit.text()
    if not os.path.exists(dest):
        # count how many path components would have to be created
        p = dest
        l = 0
        while not os.path.exists(p):
            l += 1
            p, t = os.path.split(p)
            if not t:
                break  # already root path
        if l > 1:
            res = qtlib.QuestionMsgBox(_('Init'),
                    _('Are you sure about adding the new repository '
                      '%d extra levels deep?') % l,
                    _('Path exists up to:\n%s\nand you asked for:\n%s')
                      % (p, udest),
                    defaultbutton=QMessageBox.No)
            if not res:
                self.dest_edit.setFocus()
                return
        try:
            # create the folder, just like Hg would
            os.makedirs(dest)
        except:
            qtlib.ErrorMsgBox(_('Error executing init'),
                              _('Cannot create folder %s') % udest)
            return False
    _ui = ui.ui()
    # dotencode is the new default repo format in Mercurial 1.7
    if self.make_pre_1_7_chk.isChecked():
        _ui.setconfig('format', 'dotencode', 'False')
    try:
        # create the new repo
        hg.repository(_ui, dest, create=1)
    except error.RepoError, inst:
        qtlib.ErrorMsgBox(_('Error executing init'),
                          _('Unable to create new repository'),
                          hglib.tounicode(str(inst)))
        return False
def create(self):
    """Create a Mercurial repository at the path in the destination field,
    reporting the outcome via an NSAlert.

    NOTE(review): the success alert is also titled with the "ERROR"
    localization key, and "sucessfully" is misspelled in the user-visible
    string — both look unintended but are left as-is here.
    """
    if(self.destinationField.stringValue() is not None):
        try:
            hg.repository(cmdutil.remoteui(ui.ui(), {}),
                          self.destinationField.stringValue(), create=1)
            NSAlert.alertWithMessageText_defaultButton_alternateButton_otherButton_informativeTextWithFormat_(
                NSLocalizedString("ERROR", None), "Ok", None, None,
                NSLocalizedString("Repository created sucessfully!", None)).runModal()
        except:
            NSAlert.alertWithMessageText_defaultButton_alternateButton_otherButton_informativeTextWithFormat_(
                NSLocalizedString("ERROR", None), "Ok", None, None,
                NSLocalizedString("An error occurred while creating Mercurial repository!", None)).runModal()
            import traceback
            traceback.print_exc()
            return ERROR(u'Exception, see traceback')
def __init__(self, parent=None):
    """Worker thread holding a handle to the local open-ihm hg clone.

    Opens the repository at REPO_DIR, creating it on first run.
    """
    QtCore.QThread.__init__(self, parent)
    self.logger = logging.getLogger('__main__')
    # convenience aliases for logging from within the thread
    self.info = lambda msg : self.logger.info(msg)
    self.debug = lambda msg : self.logger.debug(msg)
    self.ui = ui.ui()
    self.url = 'https://open-ihm.googlecode.com/hg/'
    try:
        self.repo = hg.repository(self.ui, REPO_DIR)
    except Exception:
        # no clone yet -- initialize an empty repository in place
        self.repo = hg.repository(self.ui, REPO_DIR, create=True)
    return
def handlePushes(repo_id, submits, do_update=True):
    """Record a batch of pushes (*submits*) against the repository with id
    *repo_id*, cloning/pulling the local mirror as needed.

    Each push's changesets are turned into Changeset rows inside their own
    transaction; a Push row is created per submit.
    """
    if not submits:
        return
    repo = Repository.objects.get(id=repo_id)
    # flatten all changeset lists into one (NOTE(review): result unused)
    revisions = reduce(lambda r,l: r+l,
                       [p.changesets for p in submits],
                       [])
    ui = _ui()
    repopath = os.path.join(settings.REPOSITORY_BASE, repo.name)
    configpath = os.path.join(repopath, '.hg', 'hgrc')
    if not os.path.isfile(configpath):
        # first contact: clone the upstream repo and add a default-push
        if not os.path.isdir(os.path.dirname(repopath)):
            os.makedirs(os.path.dirname(repopath))
        clone(ui, str(repo.url), str(repopath),
              pull=False, uncompressed=False, rev=[],
              noupdate=False)
        cfg = open(configpath, 'a')
        # rewrite http... -> ssh... for pushing
        cfg.write('default-push = ssh%s\n' % str(repo.url)[4:])
        cfg.close()
        ui.readconfig(configpath)
        hgrepo = repository(ui, repopath)
    else:
        ui.readconfig(configpath)
        hgrepo = repository(ui, repopath)
    cs = submits[-1].changesets[-1]
    try:
        # cheap check: do we already have the newest changeset locally?
        hgrepo.changectx(cs)
    except RepoError:
        pull(ui, hgrepo, source = str(repo.url),
             force=False, update=False,
             rev=[])
        if do_update:
            update(ui, hgrepo)
    for data in submits:
        changesets = []
        for revision in data.changesets:
            try:
                cs = getChangeset(repo, hgrepo, revision)
                transaction.commit()
                changesets.append(cs)
            except Exception, e:
                transaction.rollback()
                raise
                # NOTE(review): unreachable -- the raise above exits first
                print repo.name, e
        p = Push.objects.create(repository = repo,
                                push_id = data.id, user = data.user,
                                push_date = datetime.utcfromtimestamp(data.date))
        p.changesets = changesets
        p.save()
    transaction.commit()
def __init__(self, path, username):
    """Open (or create) the hg repository at *path*.

    *username* is installed as HGUSER only when no identity is configured
    through the environment or hgrc. A default .hgignore is written on
    first use.
    """
    hgui = ui.ui()
    hgui.quiet = True
    if not os.environ.get('HGUSER') and not hgui.config("ui", "username"):
        os.environ['HGUSER'] = username
    try:
        repo = hg.repository(ui=hgui, path=path)
    except:
        # not a repository yet -- initialize one in place
        repo = hg.repository(ui=hgui, path=path, create=True)
    hgignore = os.path.join(path, '.hgignore')
    if not os.path.exists(hgignore):
        open(hgignore, 'w').write(_hgignore_content)
    self.repo = repo
    self.decode = None
def initial(self, prefix):
    """
    Set up the hg repo at ``self.location``, which usually is set
    in ``settings.HG_REPO_PATH``.

    NOTE(review): *prefix* is unused here — confirm against callers.
    """
    if not os.path.exists(self.location):
        os.makedirs(self.location)
    u = self.hg_ui
    try:
        repo = hg.repository(u, self.location, create=1)
    except error.RepoError:
        # already initialized -- just open it
        repo = hg.repository(u, self.location)
    except Exception, e:
        raise
def setup_repo(url):
    """Build a non-interactive ui and open the repository at *url*.

    Returns the (ui, repository) pair.
    """
    try:
        # older Mercurial accepted interactive= as a constructor argument
        hgui = ui.ui(interactive=False)
    except TypeError:
        # newer Mercurial: configure it through setconfig instead
        hgui = ui.ui()
        hgui.setconfig('ui', 'interactive', 'off')
    repo = hg.repository(hgui, url)
    return hgui, repo
def main():
    """CLI entry point: 'preview' (default) prints the pending news,
    'generate' writes it out."""
    # run from the project root so NEWSDIR and the repo resolve correctly
    rootdir = os.path.realpath(__file__ + '/../..')
    os.chdir(rootdir)
    if not os.path.isdir(NEWSDIR):
        sys.exit("Can't find news directory")
    repo = hg.repository(ui.ui(), '.')
    args = sys.argv[1:]
    if args:
        command = args.pop(0)
    else:
        command = 'preview'
    if command == 'generate':
        generate(repo)
    elif command == 'preview':
        out, htmlOut, _ = preview(repo)
        print 'Text Version:\n'
        for line in out:
            print line
        print 'Html Version:\n'
        for line in htmlOut:
            print line
    else:
        sys.exit("Usage: %s <preview|generate>" % sys.argv[0])
def run(hgcmd='add', root='', cwd='', files=[], **opts):
    """Launch the GStatus dialog configured for *hgcmd*.

    *hgcmd* must be 'add' (show unknown files) or 'remove' (show clean
    files); anything else raises ValueError.

    NOTE(review): *files* is a mutable default argument; it is only passed
    through to GStatus, so this is harmless unless GStatus mutates it.
    """
    u = ui.ui()
    u.updateopts(debug=False, traceback=False)
    repo = hg.repository(u, path=root)
    cmdoptions = {
        'all':False, 'clean':False, 'ignored':False, 'modified':False,
        'added':True, 'removed':True, 'deleted':True, 'unknown':False, 'rev':[],
        'exclude':[], 'include':[], 'debug':True, 'verbose':True
    }
    if hgcmd == 'add':
        cmdoptions['unknown'] = True
    elif hgcmd == 'remove':
        cmdoptions['clean'] = True
    else:
        # fix: raising a plain string is not a valid Python exception;
        # use a real exception type instead
        raise ValueError("Invalid command '%s'" % hgcmd)
    dialog = GStatus(u, repo, cwd, files, cmdoptions, True)
    gtk.gdk.threads_init()
    gtk.gdk.threads_enter()
    dialog.display()
    gtk.main()
    gtk.gdk.threads_leave()
def get_rev_label_changeset_revision_from_repository_metadata( app, repository_metadata, repository=None, include_date=True, include_hash=True ):
    """Resolve the changeset recorded in *repository_metadata* into a
    (rev, label, changeset_revision) triple.

    When the changeset is no longer present in the repository, rev is '-1'
    and the label reflects that.
    """
    if repository is None:
        repository = repository_metadata.repository
    repo = hg.repository( get_configured_ui(), repository.repo_path( app ) )
    changeset_revision = repository_metadata.changeset_revision
    ctx = get_changectx_for_changeset( repo, changeset_revision )
    if ctx:
        # zero-padded numeric rev for sorting; label is human-readable
        rev = '%04d' % ctx.rev()
        if include_date:
            changeset_revision_date = get_readable_ctx_date( ctx )
            if include_hash:
                label = "%s:%s (%s)" % ( str( ctx.rev() ), changeset_revision, changeset_revision_date )
            else:
                label = "%s (%s)" % ( str( ctx.rev() ), changeset_revision_date )
        else:
            if include_hash:
                label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
            else:
                label = "%s" % str( ctx.rev() )
    else:
        # changeset not found in the repository
        rev = '-1'
        if include_hash:
            label = "-1:%s" % changeset_revision
        else:
            label = "-1"
    return rev, label, changeset_revision
def __init__(self, repo, name=None, baseui=None):
    """Wrap *repo* (a path or an open repository) for web serving.

    Silences untrusted-config warnings and tty behavior on both the repo
    ui and its baseui.
    """
    if isinstance(repo, str):
        if baseui:
            u = baseui.copy()
        else:
            u = ui.ui()
        self.repo = hg.repository(u, repo)
    else:
        self.repo = repo
    # apply any configured view filtering
    self.repo = self._getview(self.repo)
    self.repo.ui.setconfig("ui", "report_untrusted", "off")
    self.repo.baseui.setconfig("ui", "report_untrusted", "off")
    self.repo.ui.setconfig("ui", "nontty", "true")
    self.repo.baseui.setconfig("ui", "nontty", "true")
    hook.redirect(True)
    # mtime/size of the repo store; -1 forces a refresh on first request
    self.mtime = -1
    self.size = -1
    self.reponame = name
    self.archives = "zip", "gz", "bz2"
    self.stripecount = 1
    # a repo owner may set web.templates in .hg/hgrc to get any file
    # readable by the user running the CGI script
    self.templatepath = self.config("web", "templates")
    self.websubtable = self.loadwebsub()
def set_destination(self, value):
    """Point this object at a new repository location.

    Records *value* as the destination and tries to open a Mercurial
    repository there; on any failure the repository handle is cleared.
    """
    try:
        self._destination = value
        self._repository = hg.repository(ui.ui(), value)
    except:
        # unreadable or nonexistent repo: keep the destination, drop the handle
        self._repository = None
def fetch(self, repo_path, subdir=None, layout='auto', startrev=0,
          externals=None, noupdate=True, dest=None, rev=None, config=None):
    """Clone the svn repo at *repo_path* via hgsubversion and return the
    resulting hg repository object.

    NOTE(review): *dest* is accepted but unused — confirm against callers.
    """
    if layout == 'single':
        if subdir is None:
            subdir = 'trunk'
    elif subdir is None:
        subdir = ''
    projectpath = repo_path
    if subdir:
        projectpath += '/' + subdir
    cmd = [
        'clone',
        '--layout=%s' % layout,
        '--startrev=%s' % startrev,
        fileurl(projectpath),
        self.wc_path,
        ]
    if self.stupid:
        cmd.append('--stupid')
    if noupdate:
        cmd.append('--noupdate')
    if rev is not None:
        cmd.append('--rev=%s' % rev)
    config = dict(config or {})
    if externals:
        config['hgsubversion.externals'] = str(externals)
    # --config flags must come before the subcommand arguments
    for k,v in reversed(sorted(config.iteritems())):
        cmd[:0] = ['--config', '%s=%s' % (k, v)]
    r = dispatch(cmd)
    assert not r, 'fetch of %s failed' % projectpath
    return hg.repository(testui(), self.wc_path)
def getTip(self):
    """Return the short hex id of the first head of the repo at self.uri."""
    hg_ui = ui.ui()
    # hg.peer() superseded hg.repository() for remote access in newer Mercurial
    if hasattr(hg, 'peer'):
        remote = hg.peer(hg_ui, {}, self.uri)
    else:
        remote = hg.repository(hg_ui, self.uri)
    heads = remote.heads()
    return short(heads[0])
def test_branch(self):
    ''' Test 'clone --branch' '''
    ui = self.ui()
    _dispatch(ui, ['init', self.wc_path])
    repo = self.repo
    repo.ui.setconfig('ui', 'username', 'anonymous')
    # build three commits on branches B1, default, B2
    fpath = os.path.join(self.wc_path, 'it')
    f = file(fpath, 'w')
    f.write('C1')
    f.flush()
    commands.add(ui, repo)
    commands.branch(ui, repo, label="B1")
    commands.commit(ui, repo, message="C1")
    f.write('C2')
    f.flush()
    commands.branch(ui, repo, label="default")
    commands.commit(ui, repo, message="C2")
    f.write('C3')
    f.flush()
    commands.branch(ui, repo, label="B2")
    commands.commit(ui, repo, message="C3")
    self.assertEqual(len(repo), 3)
    # clone only branch B1 and verify the working dir sits on its head
    branch = 'B1'
    _dispatch(ui, ['clone', self.wc_path, self.wc_path + '2',
                   '--branch', branch])
    repo2 = hg.repository(ui, self.wc_path + '2')
    self.assertEqual(repo[branch].hex(), repo2['.'].hex())
def test_update(self):
    ''' Test 'clone --updaterev' '''
    ui = self.ui()
    _dispatch(ui, ['init', self.wc_path])
    repo = self.repo
    repo.ui.setconfig('ui', 'username', 'anonymous')
    # build three linear commits
    fpath = os.path.join(self.wc_path, 'it')
    f = file(fpath, 'w')
    f.write('C1')
    f.flush()
    commands.add(ui, repo)
    commands.commit(ui, repo, message="C1")
    f.write('C2')
    f.flush()
    commands.commit(ui, repo, message="C2")
    f.write('C3')
    f.flush()
    commands.commit(ui, repo, message="C3")
    self.assertEqual(len(repo), 3)
    # clone with --updaterev and verify the working dir is at that rev
    updaterev = 1
    _dispatch(ui, ['clone', self.wc_path, self.wc_path + '2',
                   '--updaterev=%s' % updaterev])
    repo2 = hg.repository(ui, self.wc_path + '2')
    self.assertEqual(str(repo[updaterev]), str(repo2['.']))
def __init__(self, rev = 'tip', path = '/home/calixte/dev/mozilla/mozilla-central.hg'):
    """Open the repository at *path* and pin a changectx for *rev*.

    ``haspushlog`` records whether the pushlog extension is active on
    this repo.
    """
    self.root = path
    self.ui = ui.ui()
    self.rev = rev
    self.repo = hg.repository(self.ui, path)
    self.ctx = self.repo[self.rev]
    # pushlog-enabled repos expose a .pushlog attribute
    self.haspushlog = hasattr(self.repo, 'pushlog')
def get_repo_changelog_tuples( repo_path ):
    """Return (rev, short-hash) tuples for every changeset, oldest first."""
    repo = hg.repository( ui.ui(), repo_path )
    tuples = []
    for changeset in repo.changelog:
        ctx = repo.changectx( changeset )
        # str(ctx) renders the short changeset hash
        tuples.append( ( ctx.rev(), str( ctx ) ) )
    return tuples
def __init__(self, repo, ufiles, minpct, copies):
    """Background thread that searches *ufiles* for rename/copy candidates
    with at least *minpct* similarity.

    # opens a private repo handle -- presumably because repo objects are
    # not safe to share across threads; TODO confirm
    """
    super(RenameSearchThread, self).__init__()
    self.repo = hg.repository(ui.ui(), repo.root)
    self.ufiles = ufiles
    self.minpct = minpct
    self.copies = copies
    # cooperative cancellation flag checked by the worker loop
    self.stopped = False
def post_save(cls, request, form, template_path):
    """Commit a saved template into the mercurial repository that contains it.

    Returns a human-readable status string; raises TemplatesAdminException
    when the template is not under any TEMPLATE_DIRS entry.
    """
    dir, file = os.path.dirname(template_path) + "/", os.path.basename(template_path)
    if request.user.first_name and request.user.last_name:
        author = "%s %s" % (request.user.first_name, request.user.last_name)
    else:
        author = request.user.username
    message = form.cleaned_data['commitmessage'] or '--'
    # pick the longest TEMPLATE_DIRS entry containing this template
    repo_path = None
    for template_dir in settings.TEMPLATE_DIRS:
        if dir.startswith(template_dir):
            if repo_path is None or len(template_dir)>len(repo_path):
                repo_path = template_dir
    if repo_path is None:
        raise TemplatesAdminException(
            _("Could not find template base directory")
        )
    commit_file = template_path
    if commit_file.startswith(path):
        commit_file = commit_file[len(path):]
    if commit_file.startswith("/"):
        commit_file = commit_file[1:]
    uio = ui.ui(interactive=False, report_untrusted=False, quiet=True)
    # fix: open the repository detected above (repo_path) rather than the
    # module-level `path`, consistent with the sibling post_save variant
    repo = hg.repository(uio, path=repo_path)
    repo.commit([str(commit_file)], text=message,
                user="******" % (author, request.user.email))
    return "Template '%s' was committed succesfully into mercurial repository." % commit_file
def commit(self, key, data):
    """ commit changed ``data`` to the entity identified by ``key``.

    Writes the file under ``self.location`` (creating missing parent
    directories), then ``hg add``s and commits it.
    """
    try:
        fobj = open(os.path.join(self.location, key), 'w')
    except IOError:
        # parent directory seems to be missing: create it and retry once.
        # fix: os.dirname does not exist -- use os.path.dirname
        os.makedirs(os.path.dirname(os.path.join(self.location, key)))
        return self.commit(key, data)
    fobj.write(data)
    fobj.close()
    u = self.hg_ui
    repo = hg.repository(u, self.location)
    # adding an already-tracked file raises; let it propagate as before
    commands.add(u, repo, os.path.join(self.location, key))
    commands.commit(u, repo, message='auto commit from django')
def test_handlePushes_repeated(self):
    """handlePushes must be idempotent: replaying the same push JSON must
    not create a second Push row."""
    repo = Repository.objects.create(
        name='mozilla-central',
        url='file:///' + self.repo
    )
    # empty submit list is a no-op
    self.assertEqual(handlePushes(repo.pk, []), None)
    ui = mock_ui()
    hgcommands.init(ui, self.repo)
    hgrepo = repository(ui, self.repo)
    (open(hgrepo.pathto('file.dtd'), 'w').write(''' <!ENTITY key1 "Hello"> <!ENTITY key2 "Cruel"> '''))
    hgcommands.addremove(ui, hgrepo)
    hgcommands.commit(ui, hgrepo,
                      user="******",
                      message="initial commit")
    rev0 = hgrepo[0].hex()
    timestamp = int(time.time())
    pushjs0 = PushJS(100, {
        'date': timestamp,
        'changesets': [rev0],
        'user': '******',
    })
    # first time
    pushes_initial = Push.objects.all().count()
    result = handlePushes(repo.pk, [pushjs0])
    self.assertEqual(result, 1)
    pushes_after = Push.objects.all().count()
    self.assertEqual(pushes_initial, pushes_after - 1)
    # a second time should be harmless
    result = handlePushes(repo.pk, [pushjs0])
    self.assertEqual(result, 1)
    pushes_after_after = Push.objects.all().count()
    self.assertEqual(pushes_after, pushes_after_after)
def findoutgoing(self, parent):
    '''Return the base set of outgoing nodes.

    A caching wrapper around mercurial.localrepo.findoutgoing().
    Complains (to the user), if the parent workspace is non-existent
    or inaccessible'''
    # suppress Mercurial's own chatter while probing the parent repo
    self.ui.pushbuffer()
    try:
        try:
            ui = self.ui
            # remoteui appeared in newer Mercurial; use it when available
            if hasattr(cmdutil, 'remoteui'):
                ui = cmdutil.remoteui(ui, {})
            pws = hg.repository(ui, parent)
            return self.repo.findoutgoing(pws)
        except HgRepoError:
            # warn but degrade gracefully with an empty outgoing set
            self.ui.warn("Warning: Parent workspace '%s' is not "
                         "accessible\n"
                         "active list will be incomplete\n\n" % parent)
            return []
    finally:
        self.ui.popbuffer()
def run_wsgi(self, req): try: try: self.refresh() virtual = req.env.get("PATH_INFO", "").strip('/') tmpl = self.templater(req) ctype = tmpl('mimetype', encoding=encoding.encoding) ctype = templater.stringify(ctype) # a static file if virtual.startswith('static/') or 'static' in req.form: if virtual.startswith('static/'): fname = virtual[7:] else: fname = req.form['static'][0] static = templater.templatepath('static') return (staticfile(static, fname, req), ) # top-level index elif not virtual: req.respond(HTTP_OK, ctype) return self.makeindex(req, tmpl) # nested indexes and hgwebs repos = dict(self.repos) while virtual: real = repos.get(virtual) if real: req.env['REPO_NAME'] = virtual try: repo = hg.repository(self.ui, real) return hgweb(repo).run_wsgi(req) except IOError, inst: msg = inst.strerror raise ErrorResponse(HTTP_SERVER_ERROR, msg) except error.RepoError, inst: raise ErrorResponse(HTTP_SERVER_ERROR, str(inst))
def get_repo_stats(p):
    """ p as repo path

    Walk the changelog and return a tuple of
    (path, first_date_iso, last_date_iso, active_days, total_days,
    commit_count, file_count, user_set).
    On any failure (unreadable repo, empty repo, ...) returns eight
    empty strings, preserving the original best-effort contract.
    """
    date_first = None
    date_last = None
    count_days = 0
    count_commits = 0
    all_users = set()
    all_files = set()
    try:
        repo = hg.repository(ui.ui(), p)
        date_prev = None
        revctx = None
        for rev in repo:
            revctx = repo[rev]
            date = revctx.date()
            if not date_first:
                date_first = get_date_str(date)
            count_commits += 1
            # bucket commits by day-since-epoch to count distinct active days
            date_now = int(date[0] / 3600 / 24)
            count_days += 1 if date_prev != date_now else 0
            date_prev = date_now
            all_files |= set(revctx.files())
            all_users.add(revctx.user())
        date_last = get_date_str(revctx.date())
        total_days = (date_last - date_first).days
        return p, date_first.isoformat(), date_last.isoformat(
        ), count_days, total_days, count_commits, len(all_files), all_users
    except Exception:
        # narrowed from a bare except; still a deliberate best-effort
        # fallback (also dropped the unused, typo'd `totay_days` local)
        return ('', ) * 8
def post_save(cls, request, form, template_path):
    """Commit a saved template into the mercurial repository that
    contains it.

    Raises TemplatesAdminException when the template lies outside every
    TEMPLATE_DIRS entry.
    """
    dir = os.path.dirname(template_path) + os.sep
    file = os.path.basename(template_path)
    if request.user.first_name and request.user.last_name:
        author = "%s %s" % (request.user.first_name, request.user.last_name)
    else:
        author = request.user.username
    message = form.cleaned_data['commitmessage'] or '--'
    # pick the longest TEMPLATE_DIRS entry containing this template
    repo_path = None
    for template_dir in settings.TEMPLATE_DIRS:
        if dir.startswith(template_dir):
            if repo_path is None or len(template_dir) > len(repo_path):
                repo_path = template_dir
    if repo_path is None:
        raise TemplatesAdminException(
            _("Could not find template base directory"))
    # make the commit path relative (NOTE(review): `path` here looks like a
    # module-level global -- confirm it matches repo_path)
    commit_file = template_path
    if commit_file.startswith(path):
        commit_file = commit_file[len(path):]
    if commit_file.startswith("/"):
        commit_file = commit_file[1:]
    uio = ui.ui()
    uio.setconfig('ui', 'interactive', False)
    uio.setconfig('ui', 'report_untrusted', False)
    uio.setconfig('ui', 'quiet', True)
    repo = hg.repository(uio, path=repo_path)
    filter = match.match(repo.root, dir, [file])
    repo.commit(match=filter, text=message,
                user="******" % (author, request.user.email))
    return _(
        "Template '%s' was committed succesfully into mercurial repository."
    ) % file
def get_hg_revision(path="."):
    """Return a short description of the hg revision at *path*.

    :rtype : str

    Yields 'HG-<branch>' for a non-default branch, 'HG-tip' when the
    working parent carries the tip tag, the first tag otherwise, and
    'HG-unknown' when mercurial or the repository is unavailable.
    """
    try:
        from mercurial import ui, hg, error
    except ImportError:
        return "HG-unknown"
    try:
        repo = hg.repository(ui.ui(), path)
    except (error.RepoError, IndexError):
        return "HG-unknown"
    ctx = repo[None]
    # ctx.tags() returns a list of tag names; the original compared that
    # list against strings ("tip"/""), which could never match
    tags = ctx.tags()
    rev = ctx.branch()
    if rev and rev != "default":
        return u'HG-%s' % rev
    if "tip" in tags:
        return u'HG-tip'
    if tags:
        return u'%s' % tags[0]
    # fix: previously fell off the end and returned None here
    return "HG-unknown"
def servehgmo(orig, ui, repo, *args, **kwargs):
    """Wraps commands.serve to provide --hgmo flag."""
    if kwargs.get('hgmo', False):
        kwargs['style'] = 'gitweb_mozilla'
        kwargs['templates'] = os.path.join(ROOT, 'hgtemplates')
        # ui.copy() is funky. Unless we do this, extension settings get
        # lost when calling hg.repository().
        ui = ui.copy()
        def setconfig(name, paths):
            # register an in-tree extension by absolute path
            ui.setconfig('extensions', name,
                         os.path.join(ROOT, 'hgext', *paths))
        setconfig('pushlog', ['pushlog'])
        setconfig('pushlog-feed', ['pushlog-legacy', 'pushlog-feed.py'])
        # Since new extensions may have been flagged for loading, we need
        # to obtain a new repo instance to a) trigger loading of these
        # extensions b) force extensions' reposetup function to run.
        repo = hg.repository(ui, repo.root)
    return orig(ui, repo, *args, **kwargs)
def mozbuildinfocommand(ui, repo, *paths, **opts):
    """Print moz.build info for *paths* as stable, sorted JSON.

    In --pipemode the request (repo path, node, paths) is read as JSON
    from stdin instead of the command line.
    """
    if opts['pipemode']:
        data = json.loads(ui.fin.read())
        repo = hg.repository(ui, path=data['repo'])
        ctx = repo[data['node']]
        paths = data['paths']
    else:
        ctx = repo[opts['rev']]
    try:
        d = mozbuildinfo.filesinfo(repo, ctx, paths=paths)
    except Exception as e:
        # surface the failure in-band rather than aborting
        d = {'error': 'Exception reading moz.build info: %s' % str(e)}
    if not d:
        d = {'error': 'no moz.build info available'}
    # TODO send data to templater.
    # Use stable output and indentation to make testing easier.
    ui.write(json.dumps(d, indent=2, sort_keys=True))
    ui.write('\n')
    return
def __init__(self, repo, name=None, baseui=None):
    """Prepare *repo* (path or open repository) for web serving.

    Configures quiet, non-tty behavior and caches the webified repo.
    """
    if isinstance(repo, str):
        if baseui:
            u = baseui.copy()
        else:
            u = ui.ui()
        r = hg.repository(u, repo)
    else:
        # we trust caller to give us a private copy
        r = repo
    r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb')
    r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb')
    r.ui.setconfig('ui', 'nontty', 'true', 'hgweb')
    r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb')
    # displaying bundling progress bar while serving feel wrong and may
    # break some wsgi implementation.
    r.ui.setconfig('progress', 'disable', 'true', 'hgweb')
    r.baseui.setconfig('progress', 'disable', 'true', 'hgweb')
    self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))]
    self._lastrepo = self._repos[0]
    hook.redirect(True)
    self.reponame = name
def check(dir, only):
    """Walk dir/<visibility>/<repo>/hg trees and print a summary, the
    bookmarks and the heads of each repository found.

    *only* restricts the walk to a single first-level directory when set.
    """
    mercurial_ui = ui.ui()
    for layer1 in os.listdir(dir):
        # Public, private
        if only and layer1 != only:
            continue
        inlayer1full = os.path.join(dir, layer1)
        for layer2 in os.listdir(inlayer1full):
            # Repo names
            inlayer2full = os.path.join(inlayer1full, layer2)
            for layer3 in os.listdir(inlayer2full):
                # Repo artifacts (repo, wiki etc)
                inlayer3full = os.path.join(inlayer2full, layer3)
                # only directories named exactly "hg" hold repositories
                if layer3 != "hg" or os.path.isfile(inlayer3full):
                    continue
                repo = hg.repository(mercurial_ui, inlayer3full.encode('utf-8'))
                print("-----------\nrepo " + layer2 + "\n")
                commands.summary(mercurial_ui, repo)
                commands.bookmark(mercurial_ui, repo)
                print("-- Heads --")
                commands.heads(mercurial_ui, repo)
                print("")
def fetch(self, repo_path, subdir=None, layout='auto', startrev=0,
          externals=None, noupdate=True, dest=None, rev=None, config=None):
    """Clone the svn repo at *repo_path* via hgsubversion (quiet variant,
    with subrepo support enabled) and return the hg repository object.

    NOTE(review): *dest* is accepted but unused — confirm against callers.
    """
    if layout == 'single':
        if subdir is None:
            subdir = 'trunk'
    elif subdir is None:
        subdir = ''
    projectpath = repo_path
    if subdir:
        projectpath += '/' + subdir
    cmd = [
        'clone',
        '--quiet',
        '--layout=%s' % layout,
        '--startrev=%s' % startrev,
        fileurl(projectpath),
        self.wc_path,
        ]
    if self.stupid:
        cmd.append('--stupid')
    if noupdate:
        cmd.append('--noupdate')
    if rev is not None:
        cmd.append('--rev=%s' % rev)
    config = dict(config or {})
    if externals:
        config['hgsubversion.externals'] = str(externals)
    # allow hgsubversion-backed subrepos during the clone
    config['subrepos.hgsubversion:allowed'] = 'true'
    # --config flags must come before the subcommand arguments
    for k,v in reversed(sorted(config.iteritems())):
        cmd[:0] = ['--config', '%s=%s' % (k, v)]
    r = dispatch(cmd)
    assert not r, 'fetch of %s failed' % projectpath
    return hg.repository(self.ui(subrepo=bool(externals)), self.wc_path)
def update_hg(path, skip_rebuild = False):
    """Pull and update the yt checkout at *path*, then rebuild its C
    extensions in place (unless *skip_rebuild*).

    Refuses to proceed when the working copy is dirty ('+' in `hg id`).
    Appends a log of everything to <path>/yt_updater.log.
    """
    from mercurial import hg, ui, commands
    f = open(os.path.join(path, "yt_updater.log"), "a")
    u = ui.ui()
    u.pushbuffer()
    config_fn = os.path.join(path, ".hg", "hgrc")
    print "Reading configuration from ", config_fn
    u.readconfig(config_fn)
    repo = hg.repository(u, path)
    commands.pull(u, repo)
    f.write(u.popbuffer())
    f.write("\n\n")
    u.pushbuffer()
    commands.identify(u, repo)
    # a '+' suffix from `hg identify` means local modifications
    if "+" in u.popbuffer():
        print "Can't rebuild modules by myself."
        print "You will have to do this yourself.  Here's a sample commands:"
        print
        print "    $ cd %s" % (path)
        print "    $ hg up"
        print "    $ %s setup.py develop" % (sys.executable)
        return 1
    print "Updating the repository"
    f.write("Updating the repository\n\n")
    commands.update(u, repo, check=True)
    if skip_rebuild:
        return
    f.write("Rebuilding modules\n\n")
    p = subprocess.Popen([sys.executable, "setup.py", "build_ext", "-i"],
                         cwd=path,
                         stdout = subprocess.PIPE,
                         stderr = subprocess.STDOUT)
    stdout, stderr = p.communicate()
    f.write(stdout)
    f.write("\n\n")
    if p.returncode:
        print "BROKEN: See %s" % (os.path.join(path, "yt_updater.log"))
        sys.exit(1)
    f.write("Successful!\n")
    print "Updated successfully."
def create_repo(self, dest, ui):
    """Create a pushlog-enabled source repo plus a working repo under
    *dest*, seed five commits and push each one; returns the source path.
    """
    vct = 'http://hg.mozilla.org/hgcustom/version-control-tools'
    commands.clone(ui, vct, dest=os.path.join(dest, 'vct.hg'))
    # enable the pushlog extension from the freshly cloned tools repo
    ui.setconfig('extensions', 'pushlog',
                 os.path.join(dest, 'vct.hg/hgext/pushlog'))
    srcdir = os.path.join(dest, 'test')
    destdir = os.path.join(dest, 'testwork')
    if not os.path.exists(srcdir):
        os.makedirs(srcdir)
    commands.init(ui, srcdir)
    commands.init(ui, destdir)
    repo = hg.repository(ui, destdir)
    myfile1 = os.path.join(destdir, 'myfile1')
    myfile2 = os.path.join(destdir, 'myfile2')
    for i in range(5):
        with open(myfile1, 'a') as In:
            In.write(str(i))
        with open(myfile2, 'a') as In:
            In.write(str(i))
        commands.commit(ui, repo, myfile1, myfile2,
                        message='message' + str(i),
                        user='******',
                        addremove=True)
        commands.push(ui, repo, dest=srcdir)
        # keep successive pushes at distinct timestamps for the pushlog
        time.sleep(1.01)
    return srcdir
def __init__(self, repoPath, local_site):
    """Open the hg repository at *repoPath* with a non-interactive ui,
    wiring up rbssh when no ssh command is configured.
    """
    from mercurial import hg, ui
    from mercurial.__version__ import version
    version_parts = [int(x) for x in version.split(".")]
    # ui.ui(interactive=...) only exists up to Mercurial 1.2
    if version_parts[0] == 1 and version_parts[1] <= 2:
        hg_ui = ui.ui(interactive=False)
    else:
        hg_ui = ui.ui()
        hg_ui.setconfig('ui', 'interactive', 'off')
    # Check whether ssh is configured for mercurial. Assume that any
    # configured ssh is set up correctly for this repository.
    hg_ssh = hg_ui.config('ui', 'ssh')
    if not hg_ssh:
        logging.debug('Using rbssh for mercurial')
        hg_ui.setconfig('ui', 'ssh',
                        'rbssh --rb-local-site=%s' % local_site)
    else:
        logging.debug('Found configured ssh for mercurial: %s' % hg_ssh)
    self.repo = hg.repository(hg_ui, path=repoPath)
def liveversion():
    '''Attempt to read the version from the live repository.

    Returns a ``(branch, version)`` tuple derived from ``hg identify``:
    the most recent numeric tag (with a trailing ``+`` when the working
    copy is dirty), or ``{latesttag}+{latesttagdistance}-<node>`` when the
    current revision is untagged.

    Raises ``error.RepoError`` when no ``.hg`` directory is found.

    Fix over the original: when ``identify`` produced no output at all
    (``l`` empty), ``version`` was never assigned and the final ``return``
    raised NameError; it now falls back to an empty version string.
    '''
    utilpath = os.path.dirname(os.path.realpath(__file__))
    thgpath = os.path.dirname(os.path.dirname(utilpath))
    if not os.path.isdir(os.path.join(thgpath, '.hg')):
        raise error.RepoError(_('repository %s not found') % thgpath)

    u = ui.ui()
    repo = hg.repository(u, path=thgpath)

    u.pushbuffer()
    commands.identify(u, repo, id=True, tags=True, rev='.')
    l = u.popbuffer().split()
    while len(l) > 1 and l[-1][0].isalpha():  # remove non-numbered tags
        l.pop()
    version = ''  # fallback when identify yields nothing (was: NameError)
    if len(l) > 1:  # tag found
        version = l[-1]
        if l[0].endswith('+'):  # propagate the dirty status to the tag
            version += '+'
    elif len(l) == 1:  # no tag found
        u.pushbuffer()
        commands.parents(u, repo, template='{latesttag}+{latesttagdistance}-')
        version = u.popbuffer() + l[0]
    return repo[None].branch(), version
def gmmerge(local, other):
    """Merge two guestrepo mapping files.

    Reads the guestrepo config from the *local* and *other* file versions
    (copied into the repository root), emits every non-conflicting
    ``key = value`` line as-is, and wraps each conflicting key in standard
    conflict markers.

    Returns ``(had_conflicts, merged_text)`` where ``merged_text`` always
    ends with a newline.

    Fixes over the original: ``grotherd`` was an undefined name (typo for
    ``gmotherd``); iterating ``conflicts`` directly yielded bare keys so the
    three-way unpack could never succeed; and the conflict-marker block was
    left unfinished (a lone ``<<<<<<< local`` followed by ``pass``).
    """
    repo = hg.repository(ui.ui(), '.')
    guestrepo = load_guestrepo()

    gmlocald = guestrepo.readconfig(copy_into_root(repo, local),
                                    repo[None])['']
    gmotherd = guestrepo.readconfig(copy_into_root(repo, other),
                                    repo[None])['']

    # key -> (local value, other value) for keys present on both sides
    # with differing values.
    conflicts = dict((key, (val, gmotherd[key]))
                     for (key, val) in gmlocald.iteritems()
                     if (key in gmotherd and gmotherd[key] != val))

    # Non-conflicting entries pass through unchanged (keys common to both
    # sides with equal values are intentionally emitted from each side, as
    # before).
    outstr = ["%s = %s" % (key, val)
              for (key, val) in chain(gmlocald.iteritems(),
                                      gmotherd.iteritems())
              if key not in conflicts]

    for key, (localval, otherval) in conflicts.iteritems():
        outstr.append('<<<<<<< local')
        outstr.append('%s = %s' % (key, localval))
        outstr.append('=======')
        outstr.append('%s = %s' % (key, otherval))
        outstr.append('>>>>>>> other')

    return bool(conflicts), '\n'.join(outstr) + '\n'
def _do_case(self, name, layout):
    """Round-trip test helper: fetch fixture *name* with the given
    *layout*, then clone the same Subversion checkout again in 'stupid'
    mode and assert both clones end up with identical heads.
    """
    subdir = test_util.subdir.get(name, '')
    config = {}
    u = test_util.testui()
    # Apply any per-fixture custom branch mappings both to the fetch
    # config and to the ui used for the second clone.
    for branch, path in test_util.custom.get(name, {}).iteritems():
        config['hgsubversionbranch.%s' % branch] = path
        u.setconfig('hgsubversionbranch', branch, path)
    repo, repo_path = self.load_and_fetch(name,
                                          subdir=subdir,
                                          layout=layout,
                                          config=config)
    assert test_util.repolen(self.repo) > 0, \
        'Repo had no changes, maybe you need to add a subdir entry in test_util?'
    wc2_path = self.wc_path + '_stupid'
    checkout_path = repo_path
    if subdir:
        checkout_path += '/' + subdir
    u.setconfig('hgsubversion', 'stupid', '1')
    u.setconfig('hgsubversion', 'layout', layout)
    test_util.hgclone(u, test_util.fileurl(checkout_path), wc2_path,
                      update=False)
    if layout == 'single':
        # single layout collapses everything onto one branch/head.
        self.assertEqual(len(self.repo.heads()), 1)
    self.repo2 = hg.repository(test_util.testui(), wc2_path)
    self.assertEqual(self.repo.heads(), self.repo2.heads())
def setUp(self):
    """Create a throwaway Mercurial repository populated with sample
    content: three pages, an about page carrying aliases metadata,
    three posts, and one page-templated post with manual tags — all
    added and committed in a single changeset.
    """
    self.repo_path = mkdtemp()
    self.repo_pathb = u2hg(self.repo_path)
    self.ui = ui.ui()
    self.ui.setconfig(b'ui', b'username', b'foo <*****@*****.**>')
    self.ui.setconfig(b'ui', b'quiet', True)
    commands.init(self.ui, self.repo_pathb)
    self.repo = hg.repository(self.ui, self.repo_pathb)
    file_dir = os.path.join(self.repo_path, 'content')
    if not os.path.isdir(file_dir):
        os.makedirs(file_dir)
    for i in range(3):
        file_path = os.path.join(file_dir, 'page-%i.rst' % i)
        with codecs.open(file_path, 'w', encoding='utf-8') as fp:
            fp.write(SAMPLE_PAGE)
        commands.add(self.ui, self.repo, u2hg(file_path))
    file_path = os.path.join(file_dir, 'about.rst')
    with codecs.open(file_path, 'w', encoding='utf-8') as fp:
        fp.write(SAMPLE_PAGE + """
.. aliases: 301:/my-old-post-location/,/another-old-location/""")
    commands.add(self.ui, self.repo, u2hg(file_path))
    file_dir = os.path.join(self.repo_path, 'content', 'post')
    if not os.path.isdir(file_dir):
        os.makedirs(file_dir)
    for i in range(3):
        file_path = os.path.join(file_dir, 'post-%i.rst' % i)
        with codecs.open(file_path, 'w', encoding='utf-8') as fp:
            fp.write(SAMPLE_POST)
        commands.add(self.ui, self.repo, u2hg(file_path))
    file_path = os.path.join(file_dir, 'foo.rst')
    with codecs.open(file_path, 'w', encoding='utf-8') as fp:
        # using the page template, because we want to set tags manually
        fp.write(SAMPLE_PAGE + """
.. tags: foo, bar, lol""")
    commands.add(self.ui, self.repo, u2hg(file_path))
    commands.commit(self.ui, self.repo, message=b'foo', user=b'foo')
def test_branch(self):
    ''' Test 'clone --branch' '''
    ui = self.ui()
    _dispatch(ui, ['init', '--quiet', self.wc_path])
    repo = self.repo
    repo.ui.setconfig('ui', 'username', 'anonymous')
    fpath = os.path.join(self.wc_path, 'it')
    # NOTE(review): the handle is kept open across all three commits and
    # never closed; each flush() makes the appended content visible to the
    # subsequent commit. (Python 2 ``file`` builtin.)
    f = file(fpath, 'w')
    f.write('C1')
    f.flush()
    commands.add(ui, repo)
    commands.branch(ui, repo, label="B1")
    commands.commit(ui, repo, message="C1")
    f.write('C2')
    f.flush()
    commands.branch(ui, repo, label="default")
    commands.commit(ui, repo, message="C2")
    f.write('C3')
    f.flush()
    commands.branch(ui, repo, label="B2")
    commands.commit(ui, repo, message="C3")
    self.assertEqual(test_util.repolen(repo), 3)

    branch = 'B1'
    _dispatch(ui, ['clone', '--quiet', self.wc_path, self.wc_path + '2',
                   '--branch', branch])
    repo2 = hg.repository(ui, self.wc_path + '2')
    # The clone's working parent must be the head of the requested branch.
    self.assertEqual(revsymbol(repo, branch).hex(),
                     revsymbol(repo2, '.').hex())
def _do_case(self, name, stupid):
    """Round-trip test helper: fetch fixture *name* with auto layout,
    then clone it again with the 'custom' layout (and per-fixture branch
    mappings) and assert both clones have identical heads.

    *stupid* toggles hgsubversion's non-replay fallback mode.
    """
    subdir = test_util.subdir.get(name, '')
    config = {
        'hgsubversion.stupid': stupid and '1' or '0',
    }
    repo, repo_path = self.load_and_fetch(name,
                                          subdir=subdir,
                                          layout='auto',
                                          config=config)
    assert test_util.repolen(self.repo) > 0, \
        'Repo had no changes, maybe you need to add a subdir entry in test_util?'
    wc2_path = self.wc_path + '_custom'
    checkout_path = repo_path
    if subdir:
        checkout_path += '/' + subdir
    u = test_util.testui(stupid=stupid, layout='custom')
    # Mirror the fixture's custom branch mapping into the clone ui.
    for branch, path in test_util.custom.get(name, {}).iteritems():
        u.setconfig('hgsubversionbranch', branch, path)
    test_util.hgclone(u, test_util.fileurl(checkout_path), wc2_path,
                      update=False)
    self.repo2 = hg.repository(test_util.testui(), wc2_path)
    self.assertEqual(self.repo.heads(), self.repo2.heads())
def refresh(self, request=None):
    """Reload the repository and hgweb settings if the on-disk state
    (changelog or phaseroots) changed since the last refresh.

    When *request* is given, its environment is attached to the repo ui
    regardless of whether a reload happened.
    """
    st = get_stat(self.repo.spath)
    pst = get_stat(self.repo.spath, 'phaseroots')
    # changelog mtime and size, phaseroots mtime and size
    repostate = ((st.st_mtime, st.st_size), (pst.st_mtime, pst.st_size))
    # we need to compare file size in addition to mtime to catch
    # changes made less than a second ago
    if repostate != self.repostate:
        r = hg.repository(self.repo.baseui, self.repo.url())
        self.repo = self._getview(r)
        self.maxchanges = int(self.config("web", "maxchanges", 10))
        self.stripecount = int(self.config("web", "stripes", 1))
        self.maxshortchanges = int(self.config("web", "maxshortchanges",
                                               60))
        self.maxfiles = int(self.config("web", "maxfiles", 10))
        self.allowpull = self.configbool("web", "allowpull", True)
        encoding.encoding = self.config("web", "encoding",
                                        encoding.encoding)
        # update these last to avoid threads seeing empty settings
        self.repostate = repostate
        # mtime is needed for ETag
        self.mtime = st.st_mtime
    if request:
        self.repo.ui.environ = request.env
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to SOURCE
    except that certain files will be converted as largefiles: specifically,
    any file that matches any PATTERN *or* whose size is above the minimum
    size threshold is converted as a largefile. The size used to determine
    whether or not to track a file as a largefile is the size of the first
    version of the file. The minimum size can be specified either with --size
    or in configuration as ``largefiles.size``.

    After running this command you will need to make sure that largefiles is
    enabled anywhere you intend to push the new repository.

    Use --to-normal to convert largefiles back to normal files; after this,
    the DEST repository can be used without largefiles at all.'''
    opts = pycompat.byteskwargs(opts)
    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        # size threshold only matters when converting *to* largefiles
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise error.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            # Lock destination to prevent modification while it is converted to.
            # Don't need to lock src because we are just reading from its
            # history which can't change.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns')
            if pats:
                matcher = matchmod.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revisions'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            # Drop any working-copy leftovers created during conversion.
            if rdst.wvfs.exists(lfutil.shortname):
                rdst.wvfs.rmtree(lfutil.shortname)

            for f in lfiletohash.keys():
                if rdst.wvfs.isfile(f):
                    rdst.wvfs.unlink(f)
                try:
                    rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    self.filemapper.rename[lfutil.shortname] = '.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(_("missing largefile for '%s' in %s")
                                          % (realname, realrev))
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(ui, src, dest, revmapfile,
                                                    opts)

            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_("all largefiles must be present locally"))

            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(ui, src, dest, source_type='hg',
                                dest_type='hg')
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            rdst.dirstate.clear()
            release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
def _docheckout(
    ui,
    url,
    dest,
    upstream,
    revision,
    branch,
    purge,
    sharebase,
    optimes,
    behaviors,
    networkattemptlimit,
    networkattempts=None,
    sparse_profile=None,
    noupdate=False,
):
    """Robustly ensure ``dest`` is a checkout of ``revision`` (or the head
    of ``branch``) from ``url``, backed by a pooled shared store under
    ``sharebase``.

    The strategy is deliberately destructive on any inconsistency: when
    the destination or shared store looks wrong (not shared, missing
    requires, stale locks, corruption), it is deleted and the whole
    operation restarts from scratch via ``callself()``. Network failures
    are retried up to ``networkattemptlimit`` times with jittered
    exponential backoff. ``optimes`` collects (operation, seconds) timing
    pairs and ``behaviors`` collects labels describing what was done.
    """
    if not networkattempts:
        networkattempts = [1]

    def callself():
        # Re-enter with identical arguments; used after destructive recovery.
        return _docheckout(
            ui,
            url,
            dest,
            upstream,
            revision,
            branch,
            purge,
            sharebase,
            optimes,
            behaviors,
            networkattemptlimit,
            networkattempts=networkattempts,
            sparse_profile=sparse_profile,
            noupdate=noupdate,
        )

    @contextlib.contextmanager
    def timeit(op, behavior):
        # Record wall-clock duration of the wrapped operation in optimes;
        # failed operations are recorded under "<op>_errored".
        behaviors.add(behavior)
        errored = False
        try:
            start = time.time()
            yield
        except Exception:
            errored = True
            raise
        finally:
            elapsed = time.time() - start

            if errored:
                op += "_errored"

            optimes.append((op, elapsed))

    ui.write(b"ensuring %s@%s is available at %s\n" % (url, revision or branch,
                                                       dest))

    # We assume that we're the only process on the machine touching the
    # repository paths that we were told to use. This means our recovery
    # scenario when things aren't "right" is to just nuke things and start
    # from scratch. This is easier to implement than verifying the state
    # of the data and attempting recovery. And in some scenarios (such as
    # potential repo corruption), it is probably faster, since verifying
    # repos can take a while.

    destvfs = vfs.vfs(dest, audit=False, realpath=True)

    def deletesharedstore(path=None):
        storepath = path or destvfs.read(b".hg/sharedpath").strip()
        if storepath.endswith(b".hg"):
            storepath = os.path.dirname(storepath)

        storevfs = vfs.vfs(storepath, audit=False)
        storevfs.rmtree(forcibly=True)

    if destvfs.exists() and not destvfs.exists(b".hg"):
        raise error.Abort(b"destination exists but no .hg directory")

    # Refuse to enable sparse checkouts on existing checkouts. The reasoning
    # here is that another consumer of this repo may not be sparse aware. If we
    # enabled sparse, we would lock them out.
    if destvfs.exists() and sparse_profile and not destvfs.exists(b".hg/sparse"):
        raise error.Abort(
            b"cannot enable sparse profile on existing " b"non-sparse checkout",
            hint=b"use a separate working directory to use sparse",
        )

    # And the other direction for symmetry.
    if not sparse_profile and destvfs.exists(b".hg/sparse"):
        raise error.Abort(
            b"cannot use non-sparse checkout on existing sparse " b"checkout",
            hint=b"use a separate working directory to use sparse",
        )

    # Require checkouts to be tied to shared storage because efficiency.
    if destvfs.exists(b".hg") and not destvfs.exists(b".hg/sharedpath"):
        ui.warn(b"(destination is not shared; deleting)\n")
        with timeit("remove_unshared_dest", "remove-wdir"):
            destvfs.rmtree(forcibly=True)

    # Verify the shared path exists and is using modern pooled storage.
    if destvfs.exists(b".hg/sharedpath"):
        storepath = destvfs.read(b".hg/sharedpath").strip()

        ui.write(b"(existing repository shared store: %s)\n" % storepath)

        if not os.path.exists(storepath):
            ui.warn(b"(shared store does not exist; deleting destination)\n")
            with timeit("removed_missing_shared_store", "remove-wdir"):
                destvfs.rmtree(forcibly=True)
        elif not re.search(b"[a-f0-9]{40}/\.hg$", storepath.replace(b"\\", b"/")):
            ui.warn(
                b"(shared store does not belong to pooled storage; "
                b"deleting destination to improve efficiency)\n"
            )
            with timeit("remove_unpooled_store", "remove-wdir"):
                destvfs.rmtree(forcibly=True)

    if destvfs.isfileorlink(b".hg/wlock"):
        ui.warn(
            b"(dest has an active working directory lock; assuming it is "
            b"left over from a previous process and that the destination "
            b"is corrupt; deleting it just to be sure)\n"
        )
        with timeit("remove_locked_wdir", "remove-wdir"):
            destvfs.rmtree(forcibly=True)

    def handlerepoerror(e):
        # NOTE(review): only callable from within an active exception
        # handler — the bare ``raise`` re-raises the in-flight exception.
        if pycompat.bytestr(e) == _(b"abandoned transaction found"):
            ui.warn(b"(abandoned transaction found; trying to recover)\n")
            repo = hg.repository(ui, dest)
            if not repo.recover():
                ui.warn(b"(could not recover repo state; " b"deleting shared store)\n")
                with timeit("remove_unrecovered_shared_store", "remove-store"):
                    deletesharedstore()

            ui.warn(b"(attempting checkout from beginning)\n")
            return callself()

        raise

    # At this point we either have an existing working directory using
    # shared, pooled storage or we have nothing.

    def handlenetworkfailure():
        if networkattempts[0] >= networkattemptlimit:
            raise error.Abort(
                b"reached maximum number of network attempts; " b"giving up\n"
            )

        ui.warn(
            b"(retrying after network failure on attempt %d of %d)\n"
            % (networkattempts[0], networkattemptlimit)
        )

        # Do a backoff on retries to mitigate the thundering herd
        # problem. This is an exponential backoff with a multipler
        # plus random jitter thrown in for good measure.
        # With the default settings, backoffs will be:
        # 1) 2.5 - 6.5
        # 2) 5.5 - 9.5
        # 3) 11.5 - 15.5
        backoff = (2 ** networkattempts[0] - 1) * 1.5
        jittermin = ui.configint(b"robustcheckout", b"retryjittermin", 1000)
        jittermax = ui.configint(b"robustcheckout", b"retryjittermax", 5000)
        backoff += float(random.randint(jittermin, jittermax)) / 1000.0
        ui.warn(b"(waiting %.2fs before retry)\n" % backoff)
        time.sleep(backoff)

        networkattempts[0] += 1

    def handlepullerror(e):
        """Handle an exception raised during a pull.

        Returns True if caller should call ``callself()`` to retry.
        """
        if isinstance(e, error.Abort):
            if e.args[0] == _(b"repository is unrelated"):
                ui.warn(b"(repository is unrelated; deleting)\n")
                destvfs.rmtree(forcibly=True)
                return True
            elif e.args[0].startswith(_(b"stream ended unexpectedly")):
                ui.warn(b"%s\n" % e.args[0])
                # Will raise if failure limit reached.
                handlenetworkfailure()
                return True
        # TODO test this branch
        elif isinstance(e, error.ResponseError):
            if e.args[0].startswith(_(b"unexpected response from remote server:")):
                ui.warn(b"(unexpected response from remote server; retrying)\n")
                destvfs.rmtree(forcibly=True)
                # Will raise if failure limit reached.
                handlenetworkfailure()
                return True
        elif isinstance(e, ssl.SSLError):
            # Assume all SSL errors are due to the network, as Mercurial
            # should convert non-transport errors like cert validation failures
            # to error.Abort.
            ui.warn(b"ssl error: %s\n" % e)
            handlenetworkfailure()
            return True
        elif isinstance(e, urllibcompat.urlerr.urlerror):
            if isinstance(e.reason, socket.error):
                ui.warn(b"socket error: %s\n" % pycompat.bytestr(e.reason))
                handlenetworkfailure()
                return True
            else:
                ui.warn(
                    b"unhandled URLError; reason type: %s; value: %s\n"
                    % (e.reason.__class__.__name__, e.reason)
                )
        else:
            ui.warn(
                b"unhandled exception during network operation; type: %s; "
                b"value: %s\n" % (e.__class__.__name__, e)
            )

        return False

    # Perform sanity checking of store. We may or may not know the path to the
    # local store. It depends if we have an existing destvfs pointing to a
    # share. To ensure we always find a local store, perform the same logic
    # that Mercurial's pooled storage does to resolve the local store path.
    cloneurl = upstream or url

    try:
        clonepeer = hg.peer(ui, {}, cloneurl)
        rootnode = peerlookup(clonepeer, b"0")
    except error.RepoLookupError:
        raise error.Abort(b"unable to resolve root revision from clone " b"source")
    except (error.Abort, ssl.SSLError, urllibcompat.urlerr.urlerror) as e:
        if handlepullerror(e):
            return callself()
        raise

    if rootnode == nullid:
        raise error.Abort(b"source repo appears to be empty")

    storepath = os.path.join(sharebase, hex(rootnode))
    storevfs = vfs.vfs(storepath, audit=False)

    if storevfs.isfileorlink(b".hg/store/lock"):
        ui.warn(
            b"(shared store has an active lock; assuming it is left "
            b"over from a previous process and that the store is "
            b"corrupt; deleting store and destination just to be "
            b"sure)\n"
        )
        if destvfs.exists():
            with timeit("remove_dest_active_lock", "remove-wdir"):
                destvfs.rmtree(forcibly=True)

        with timeit("remove_shared_store_active_lock", "remove-store"):
            storevfs.rmtree(forcibly=True)

    if storevfs.exists() and not storevfs.exists(b".hg/requires"):
        ui.warn(
            b"(shared store missing requires file; this is a really "
            b"odd failure; deleting store and destination)\n"
        )
        if destvfs.exists():
            with timeit("remove_dest_no_requires", "remove-wdir"):
                destvfs.rmtree(forcibly=True)

        with timeit("remove_shared_store_no_requires", "remove-store"):
            storevfs.rmtree(forcibly=True)

    if storevfs.exists(b".hg/requires"):
        requires = set(storevfs.read(b".hg/requires").splitlines())
        # FUTURE when we require generaldelta, this is where we can check
        # for that.
        required = {b"dotencode", b"fncache"}

        missing = required - requires
        if missing:
            ui.warn(
                b"(shared store missing requirements: %s; deleting "
                b"store and destination to ensure optimal behavior)\n"
                % b", ".join(sorted(missing))
            )
            if destvfs.exists():
                with timeit("remove_dest_missing_requires", "remove-wdir"):
                    destvfs.rmtree(forcibly=True)

            with timeit("remove_shared_store_missing_requires", "remove-store"):
                storevfs.rmtree(forcibly=True)

    created = False

    if not destvfs.exists():
        # Ensure parent directories of destination exist.
        # Mercurial 3.8 removed ensuredirs and made makedirs race safe.
        if util.safehasattr(util, "ensuredirs"):
            makedirs = util.ensuredirs
        else:
            makedirs = util.makedirs

        makedirs(os.path.dirname(destvfs.base), notindexed=True)
        makedirs(sharebase, notindexed=True)

        if upstream:
            ui.write(b"(cloning from upstream repo %s)\n" % upstream)

        if not storevfs.exists():
            behaviors.add(b"create-store")

        try:
            with timeit("clone", "clone"):
                shareopts = {b"pool": sharebase, b"mode": b"identity"}
                res = hg.clone(
                    ui,
                    {},
                    clonepeer,
                    dest=dest,
                    update=False,
                    shareopts=shareopts,
                    stream=True,
                )
        except (error.Abort, ssl.SSLError, urllibcompat.urlerr.urlerror) as e:
            if handlepullerror(e):
                return callself()
            raise
        except error.RepoError as e:
            return handlerepoerror(e)
        except error.RevlogError as e:
            ui.warn(b"(repo corruption: %s; deleting shared store)\n" % e)
            with timeit("remove_shared_store_revlogerror", "remote-store"):
                deletesharedstore()
            return callself()

        # TODO retry here.
        if res is None:
            raise error.Abort(b"clone failed")

        # Verify it is using shared pool storage.
        if not destvfs.exists(b".hg/sharedpath"):
            raise error.Abort(b"clone did not create a shared repo")

        created = True

    # The destination .hg directory should exist. Now make sure we have the
    # wanted revision.

    repo = hg.repository(ui, dest)

    # We only pull if we are using symbolic names or the requested revision
    # doesn't exist.
    havewantedrev = False

    if revision:
        try:
            ctx = scmutil.revsingle(repo, revision)
        except error.RepoLookupError:
            ctx = None

        if ctx:
            if not ctx.hex().startswith(revision):
                raise error.Abort(
                    b"--revision argument is ambiguous",
                    hint=b"must be the first 12+ characters of a " b"SHA-1 fragment",
                )

            checkoutrevision = ctx.hex()
            havewantedrev = True

    if not havewantedrev:
        ui.write(b"(pulling to obtain %s)\n" % (revision or branch,))

        remote = None
        try:
            remote = hg.peer(repo, {}, url)
            pullrevs = [peerlookup(remote, revision or branch)]
            checkoutrevision = hex(pullrevs[0])
            if branch:
                ui.warn(
                    b"(remote resolved %s to %s; "
                    b"result is not deterministic)\n" % (branch, checkoutrevision)
                )

            if checkoutrevision in repo:
                ui.warn(b"(revision already present locally; not pulling)\n")
            else:
                with timeit("pull", "pull"):
                    pullop = exchange.pull(repo, remote, heads=pullrevs)
                    if not pullop.rheads:
                        raise error.Abort(b"unable to pull requested revision")
        except (error.Abort, ssl.SSLError, urllibcompat.urlerr.urlerror) as e:
            if handlepullerror(e):
                return callself()
            raise
        except error.RepoError as e:
            return handlerepoerror(e)
        except error.RevlogError as e:
            ui.warn(b"(repo corruption: %s; deleting shared store)\n" % e)
            deletesharedstore()
            return callself()
        finally:
            if remote:
                remote.close()

    # Now we should have the wanted revision in the store. Perform
    # working directory manipulation.

    # Avoid any working directory manipulations if `-U`/`--noupdate` was passed
    if noupdate:
        ui.write(b"(skipping update since `-U` was passed)\n")
        return None

    # Purge if requested. We purge before update because this way we're
    # guaranteed to not have conflicts on `hg update`.
    if purge and not created:
        ui.write(b"(purging working directory)\n")
        purgeext = extensions.find(b"purge")

        # Mercurial 4.3 doesn't purge files outside the sparse checkout.
        # See https://bz.mercurial-scm.org/show_bug.cgi?id=5626. Force
        # purging by monkeypatching the sparse matcher.
        try:
            old_sparse_fn = getattr(repo.dirstate, "_sparsematchfn", None)
            if old_sparse_fn is not None:
                # TRACKING hg50
                # Arguments passed to `matchmod.always` were unused and have been removed
                if util.versiontuple(n=2) >= (5, 0):
                    repo.dirstate._sparsematchfn = lambda: matchmod.always()
                else:
                    repo.dirstate._sparsematchfn = lambda: matchmod.always(
                        repo.root, ""
                    )

            with timeit("purge", "purge"):
                if purgeext.purge(
                    ui,
                    repo,
                    all=True,
                    abort_on_err=True,
                    # The function expects all arguments to be
                    # defined.
                    **{"print": None, "print0": None, "dirs": None, "files": None}
                ):
                    raise error.Abort(b"error purging")
        finally:
            if old_sparse_fn is not None:
                repo.dirstate._sparsematchfn = old_sparse_fn

    # Update the working directory.

    if repo[b"."].node() == nullid:
        behaviors.add("empty-wdir")
    else:
        behaviors.add("populated-wdir")

    if sparse_profile:
        sparsemod = getsparse()

        # By default, Mercurial will ignore unknown sparse profiles. This could
        # lead to a full checkout. Be more strict.
        try:
            repo.filectx(sparse_profile, changeid=checkoutrevision).data()
        except error.ManifestLookupError:
            raise error.Abort(
                b"sparse profile %s does not exist at revision "
                b"%s" % (sparse_profile, checkoutrevision)
            )

        # TRACKING hg48 - parseconfig takes `action` param
        if util.versiontuple(n=2) >= (4, 8):
            old_config = sparsemod.parseconfig(
                repo.ui, repo.vfs.tryread(b"sparse"), b"sparse"
            )
        else:
            old_config = sparsemod.parseconfig(repo.ui, repo.vfs.tryread(b"sparse"))

        old_includes, old_excludes, old_profiles = old_config

        if old_profiles == {sparse_profile} and not old_includes and not old_excludes:
            ui.write(
                b"(sparse profile %s already set; no need to update "
                b"sparse config)\n" % sparse_profile
            )
        else:
            if old_includes or old_excludes or old_profiles:
                ui.write(
                    b"(replacing existing sparse config with profile "
                    b"%s)\n" % sparse_profile
                )
            else:
                ui.write(b"(setting sparse config to profile %s)\n" % sparse_profile)

            # If doing an incremental update, this will perform two updates:
            # one to change the sparse profile and another to update to the new
            # revision. This is not desired. But there's not a good API in
            # Mercurial to do this as one operation.
            with repo.wlock(), timeit(
                "sparse_update_config", "sparse-update-config"
            ):
                # pylint --py3k: W1636
                fcounts = list(
                    map(
                        len,
                        sparsemod._updateconfigandrefreshwdir(
                            repo, [], [], [sparse_profile], force=True
                        ),
                    )
                )

                repo.ui.status(
                    b"%d files added, %d files dropped, "
                    b"%d files conflicting\n" % tuple(fcounts)
                )

            ui.write(b"(sparse refresh complete)\n")

    op = "update_sparse" if sparse_profile else "update"
    behavior = "update-sparse" if sparse_profile else "update"

    with timeit(op, behavior):
        if commands.update(ui, repo, rev=checkoutrevision, clean=True):
            raise error.Abort(b"error updating")

    ui.write(b"updated to %s\n" % checkoutrevision)

    return None
self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False) self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default') self.lastbranch = None if os.path.isdir(path) and len(os.listdir(path)) > 0: try: self.repo = hg.repository(self.ui, path) if not self.repo.local(): raise NoRepo(_('%s is not a local Mercurial repo') % path) except error.RepoError, err: ui.traceback() raise NoRepo(err.args[0]) else: try: ui.status(_('initializing destination %s repository\n') % path) self.repo = hg.repository(self.ui, path, create=True) if not self.repo.local(): raise NoRepo(_('%s is not a local Mercurial repo') % path) self.created.append(path) except error.RepoError: ui.traceback() raise NoRepo("could not create hg repo %s as sink" % path) self.lock = None self.wlock = None self.filemapmode = False def before(self): self.ui.debug(_('run hg sink pre-conversion action\n')) self.wlock = self.repo.wlock() self.lock = self.repo.lock()
def build_changelog(docs_path, package_name="mezzanine"):
    """
    Converts Mercurial commits into a changelog in RST format.

    Walks every changeset newest-first, detects version bumps by watching
    the package ``__init__.py`` for ``__version__`` assignments, tags
    untagged version commits, filters out noise commits (merges, ignore
    words, one-worders, changelog regenerations), and writes the grouped
    entries to the project-level CHANGELOG file.

    Silently returns when Mercurial is unavailable or the project path is
    not a repository.
    """
    project_path = os.path.join(docs_path, "..")
    version_file = os.path.join(package_name, "__init__.py")
    version_var = "__version__"
    changelog_filename = "CHANGELOG"
    changelog_file = os.path.join(project_path, changelog_filename)
    versions = SortedDict()
    repo = None
    # Commits whose description contains any of these words are skipped.
    ignore = ("AUTHORS", "formatting", "typo", "pep8", "pep 8",
              "whitespace", "README", "trans", "print debug",
              "debugging", "tabs", "style", "sites", "ignore",
              "tweak", "cleanup", "minor", "for changeset",
              ".com``", "oops", "syntax")
    # Short changeset hash -> version released as a hotfix.
    hotfixes = {
        "40cbc47b8d8a": "1.0.9",
        "a25749986abc": "1.0.10",
    }

    # Load the repo.
    try:
        from mercurial import ui, hg, error
        from mercurial.commands import tag
    except ImportError:
        pass
    else:
        try:
            ui = ui.ui()
            repo = hg.repository(ui, project_path)
        except error.RepoError:
            return
    if repo is None:
        return

    # Go through each changeset and assign it to the versions dict.
    changesets = [repo.changectx(changeset) for changeset in repo.changelog]
    for cs in sorted(changesets, reverse=True, key=_changeset_date):
        # Check if the file with the version number is in this changeset
        # and if it is, pull it out and assign it as a variable.
        files = cs.files()
        new_version = False
        # Commit message cleanup hacks.
        description = cs.description().decode("utf-8")
        description = description.rstrip(".").replace("\n", ". ")
        while "  " in description:
            description = description.replace("  ", " ")
        description = description.replace(". . ", ". ").replace("...", ",")
        while ".." in description:
            description = description.replace("..", ".")
        description = description.replace(":.", ":").replace("n'. t", "n't")
        words = description.split()
        # Format var names in commit.
        for i, word in enumerate(words):
            if (set("._") & set(word[:-1]) and set(letters) & set(word) and
                    "`" not in word and not word[0].isdigit()):
                last = ""
                if word[-1] in ",.":
                    last, word = word[-1], word[:-1]
                words[i] = "``%s``%s" % (word, last)
        description = " ".join(words)
        if version_file in files:
            # NOTE(review): relies on Python 2 exec-in-function semantics to
            # inject __version__ into locals(); exec of repo content is also
            # inherently trusting of the repository history.
            for line in cs[version_file].data().split("\n"):
                if line.startswith(version_var):
                    exec(line)
                    if locals()[version_var] == "0.1.0":
                        locals()[version_var] = "1.0.0"
                    break
            versions[locals()[version_var]] = {
                "changes": [],
                "date": _changeset_date(cs).strftime("%b %d, %Y")
            }
            new_version = len(files) == 1
        # Tag new versions.
        hotfix = hotfixes.get(cs.hex()[:12])
        if hotfix or new_version:
            if hotfix:
                version_tag = hotfix
            else:
                try:
                    version_tag = locals()[version_var]
                except KeyError:
                    version_tag = None
            if version_tag and version_tag not in cs.tags():
                try:
                    tag(ui, repo, version_tag, rev=cs.hex())
                    print("Tagging version %s" % version_tag)
                except:
                    pass
        # Ignore changesets that are merges, bumped the version, closed
        # a branch, regenerated the changelog itself, contain an ignore
        # word, or are one word long.
        merge = len(cs.parents()) > 1
        branch_closed = len(files) == 0
        changelog_update = changelog_filename in files
        ignored = [w for w in ignore if w.lower() in description.lower()]
        one_word = len(description.split()) == 1
        if (merge or new_version or branch_closed or changelog_update or
                ignored or one_word):
            continue
        # Ensure we have a current version and if so, add this changeset's
        # description to it.
        version = None
        try:
            version = locals()[version_var]
        except KeyError:
            if not hotfix:
                continue
        user = cs.user().decode("utf-8").split("<")[0].strip()
        entry = "%s - %s" % (description, user)
        if hotfix or entry not in versions[version]["changes"]:
            if hotfix:
                versions[hotfix] = {
                    "changes": [entry],
                    "date": _changeset_date(cs).strftime("%b %d, %Y"),
                }
            else:
                versions[version]["changes"].insert(0, entry)

    # Write out the changelog.
    with open(changelog_file, "w") as f:
        for version, version_info in versions.items():
            header = "Version %s (%s)" % (version, version_info["date"])
            f.write("%s\n" % header)
            f.write("%s\n" % ("-" * len(header)))
            f.write("\n")
            if version_info["changes"]:
                for change in version_info["changes"]:
                    f.write(" * %s\n" % change)
            else:
                f.write(" * No changes listed.\n")
            f.write("\n")
We could also just write directly to sys.stdout.buffer the way the ui object will, but this was easier for porting the test. """ print_(*args, **kwargs) sys.stdout.flush() def printb(data, end=b'\n'): out = getattr(sys.stdout, 'buffer', sys.stdout) out.write(data + end) out.flush() u = uimod.ui.load() repo = hg.repository(u, b'test1', create=1) os.chdir('test1') # create 'foo' with fixed time stamp f = open('foo', 'wb') f.write(b'foo\n') f.close() os.utime('foo', (1000, 1000)) # add+commit 'foo' repo[None].add([b'foo']) repo.commit(text=b'commit1', date=b"0 0") d = repo[None][b'foo'].date() if os.name == 'nt': d = d[:2]
def fetch(ui, repo, source='default', **opts):
    '''pull changes from a remote repository, merge new changes if needed.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository.

    If the pulled changes add a new branch head, the head is
    automatically merged, and the result of the merge is committed.
    Otherwise, the working directory is updated to include the new
    changes.

    When a merge occurs, the newly pulled changes are assumed to be
    "authoritative". The head of the new changes is used as the first
    parent, with local changes as the second. To switch the merge
    order, use --switch-parent.

    See 'hg help dates' for a list of formats valid for -d/--date.
    '''

    date = opts.get('date')
    if date:
        # Parse the user-supplied date once up front; the parsed value is
        # reused for the merge commit at the end.
        opts['date'] = util.parsedate(date)

    parent, p2 = repo.dirstate.parents()
    branch = repo.dirstate.branch()
    branchnode = repo.branchtags().get(branch)
    # The fast-forward/merge logic below assumes the working dir parent is
    # the tip of its branch; bail out early otherwise.
    if parent != branchnode:
        raise util.Abort(_('working dir not at branch tip '
                           '(use "hg update" to check out branch tip)'))
    if p2 != nullid:
        # A non-null second parent means an uncommitted merge is in progress.
        raise util.Abort(_('outstanding uncommitted merge'))
    wlock = lock = None
    try:
        # Take both the working-dir lock and the store lock for the whole
        # pull/update/merge/commit sequence.
        wlock = repo.wlock()
        lock = repo.lock()
        mod, add, rem, del_ = repo.status()[:4]
        if mod or add or rem:
            raise util.Abort(_('outstanding uncommitted changes'))
        if del_:
            raise util.Abort(_('working directory is missing some files'))
        # Consider only childless heads; more than one means this branch
        # already needs a manual merge before fetching.
        bheads = repo.branchheads(branch)
        bheads = [head for head in bheads if len(repo[head].children()) == 0]
        if len(bheads) > 1:
            raise util.Abort(_('multiple heads in this branch '
                               '(use "hg heads ." and "hg merge" to merge)'))

        other = hg.repository(cmdutil.remoteui(repo, opts),
                              ui.expandpath(source))
        ui.status(_('pulling from %s\n') %
                  url.hidepassword(ui.expandpath(source)))
        revs = None
        if opts['rev']:
            try:
                revs = [other.lookup(rev) for rev in opts['rev']]
            except error.CapabilityError:
                err = _("Other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise util.Abort(err)

        # Are there any changes at all?
        modheads = repo.pull(other, heads=revs)
        if modheads == 0:
            return 0

        # Is this a simple fast-forward along the current branch?
        newheads = repo.branchheads(branch)
        newheads = [head for head in newheads if len(repo[head].children()) == 0]
        newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
        if len(newheads) == 1:
            if newchildren[0] != parent:
                # Single new head descending from parent: plain update.
                return hg.clean(repo, newchildren[0])
            else:
                # Already at the only head; nothing to do.
                # NOTE(review): returns None here but 0 above — both read
                # as success, but the return values are inconsistent.
                return

        # Are there more than one additional branch heads?
        newchildren = [n for n in newchildren if n != parent]
        newparent = parent
        if newchildren:
            # Advance to the first new descendant of parent so the merge
            # below is between the pulled head and our advanced line.
            newparent = newchildren[0]
            hg.clean(repo, newparent)
        newheads = [n for n in newheads if n != newparent]
        if len(newheads) > 1:
            # A single automatic merge can't resolve several extra heads;
            # leave that to the user.
            ui.status(_('not merging with %d other new branch heads '
                        '(use "hg heads ." and "hg merge" to merge them)\n') %
                      (len(newheads) - 1))
            return

        # Otherwise, let's merge.
        err = False
        if newheads:
            # By default, we consider the repository we're pulling
            # *from* as authoritative, so we merge our changes into
            # theirs.
            if opts['switch_parent']:
                firstparent, secondparent = newparent, newheads[0]
            else:
                firstparent, secondparent = newheads[0], newparent

            ui.status(_('updating to %d:%s\n') %
                      (repo.changelog.rev(firstparent),
                       short(firstparent)))
            hg.clean(repo, firstparent)
            ui.status(_('merging with %d:%s\n') %
                      (repo.changelog.rev(secondparent),
                       short(secondparent)))
            # err is truthy when the merge left unresolved conflicts.
            err = hg.merge(repo, secondparent, remind=False)

        if not err:
            # we don't translate commit messages
            message = (cmdutil.logmessage(opts) or
                       ('Automated merge with %s' %
                        url.removeauth(other.url())))
            editor = cmdutil.commiteditor
            if opts.get('force_editor') or opts.get('edit'):
                editor = cmdutil.commitforceeditor
            n = repo.commit(message, opts['user'], opts['date'],
                            editor=editor)
            ui.status(_('new changeset %d:%s merges remote changes '
                        'with local\n') % (repo.changelog.rev(n),
                                           short(n)))
    finally:
        # Release in reverse acquisition order, even on error paths.
        release(lock, wlock)
import sys
from collections import defaultdict
from datetime import datetime

from mercurial import hg, ui

# Open the repository named on the command line and count commits per
# calendar day over the last 30 days.
repo = hg.repository(ui.ui(), sys.argv[1])
changes = repo.changelog
revs = repo.revs('date(-30)')

# 'YYYY-MM-DD' date string -> number of commits on that day
revs_by_dates = defaultdict(int)


def order_by_date_items(revs):
    """Return the (date, count) items of *revs* in chronological order.

    *revs* maps 'YYYY-MM-DD' strings to commit counts.  Sorting on the
    parsed datetime via ``key=`` is equivalent to the old ``cmp=``
    comparator and also works on Python 3, where ``sorted`` no longer
    accepts ``cmp``.
    """
    return sorted(revs.items(),
                  key=lambda item: datetime.strptime(item[0], '%Y-%m-%d'))


for rev in revs:
    revision = changes.revision(rev)
    info = revision.split('\n')
    # A raw changelog entry begins with: manifest node, committer, and a
    # timestamp line of the form "<seconds> <tzshift> ...".
    # (renamed from 'hash', which shadowed the builtin)
    node, committer, timestamp = info[:3]
    seconds, tzshift = timestamp.split(' ')[:2]
    date = datetime.fromtimestamp(int(seconds)).strftime('%Y-%m-%d')
    revs_by_dates[date] += 1

# Single-argument print() is valid in both Python 2 and 3.
print('date\tcommits')
print('\n'.join('{:s}\t{:d}'.format(*item)
                for item in order_by_date_items(revs_by_dates)))
def test_info_output(self, custom=False):
    """Check `svn info` output at several revisions, and after a clone
    with rebuilt metadata.  With custom=True, explicit
    hgsubversionbranch mappings are configured before fetching."""
    config = {}
    if custom:
        config = {
            'hgsubversionbranch.default': 'trunk',
            'hgsubversionbranch.the_branch': 'branches/the_branch',
        }
    repo, repo_path = self.load_and_fetch('two_heads.svndump',
                                          config=config)
    hg.update(self.repo, revsymbol(self.repo, 'the_branch'))
    u = self.ui()

    def info_output(target, **kwargs):
        # Capture everything svncommands.info writes for *target*.
        u.pushbuffer()
        svncommands.info(u, target, **kwargs)
        return u.popbuffer()

    def expected(date, branch, rev):
        # Fill the shared output template for this repository.
        return expected_info_output % {
            'date': date,
            'repourl': repourl(repo_path),
            'branch': branch,
            'rev': rev,
        }

    branch_date = '2008-10-08 01:39:05 +0000 (Wed, 08 Oct 2008)'
    trunk_date = '2008-10-08 01:39:29 +0000 (Wed, 08 Oct 2008)'

    # Working copy on the branch head.
    self.assertMultiLineEqual(
        info_output(self.repo),
        expected(branch_date, 'branches/the_branch', 5))

    # Working copy on default (trunk).
    hg.update(self.repo, revsymbol(self.repo, 'default'))
    self.assertMultiLineEqual(
        info_output(self.repo),
        expected(trunk_date, 'trunk', 6))

    # An explicit rev still reports the branch changeset it maps to.
    hg.update(self.repo, revsymbol(self.repo, 'default'))
    self.assertMultiLineEqual(
        info_output(self.repo, rev=3),
        expected(branch_date, 'branches/the_branch', 5))

    # A clone with rebuilt subversion metadata reports the same info.
    destpath = self.wc_path + '_clone'
    test_util.hgclone(u, self.repo, destpath)
    repo2 = hg.repository(u, destpath)
    repo2.ui.setconfig('paths', 'default-push',
                       self.repo.ui.config('paths', 'default'))
    hg.update(repo2, revsymbol(self.repo, 'default'))
    svncommands.rebuildmeta(u, repo2, [])
    self.assertMultiLineEqual(
        info_output(repo2),
        expected(trunk_date, 'trunk', 6))