def test_onerevision_noupdate(self):
    """A plain pull (no update) must leave the working parents alone."""
    repo, repo_path = self._loadupdate('single_rev.svndump')
    parents_before = repo.parents()
    # Land one more revision upstream, then pull it without updating.
    self.add_svn_rev(repo_path, {'trunk/alpha': 'Changed'})
    commands.pull(self.repo.ui, repo)
    # The working directory still points at the old parents...
    self.assertEqual(parents_before, repo.parents())
    # ...and was not moved to tip.
    self.assertTrue('tip' not in repo[None].tags())
def _hg_repository_sync(name, url, submits, do_update=True):
    """Clone-or-pull the repository *name* from *url* and return it.

    On first contact the repo is cloned and a matching ssh default-push
    path is recorded in its hgrc.  If the newest submitted changeset is
    missing locally, it is pulled in (and optionally updated to).
    """
    ui_ = ui()
    repopath = os.path.join(settings.REPOSITORY_BASE, name)
    configpath = os.path.join(repopath, '.hg', 'hgrc')
    if not os.path.isfile(configpath):
        parent = os.path.dirname(repopath)
        if not os.path.isdir(parent):
            os.makedirs(parent)
        clone(ui_, str(url), str(repopath),
              pull=False, uncompressed=False, rev=[],
              noupdate=False)
        cfg = open(configpath, 'a')
        # Swap the scheme prefix for 'ssh' to build the push URL.
        cfg.write('default-push = ssh%s\n' % str(url)[4:])
        cfg.close()
    # Both branches of the original performed these two steps.
    ui_.readconfig(configpath)
    hgrepo = repository(ui_, repopath)
    cs = submits[-1].changesets[-1]
    try:
        hgrepo.changectx(cs)
    except RepoError:
        # Newest changeset is unknown locally: fetch it.
        pull(ui_, hgrepo, source=str(url),
             force=False, update=False,
             rev=[])
        if do_update:
            update(ui_, hgrepo)
    return hgrepo
def download_patch(source, lastrev, patchbranch):
    # Fetch incoming changes for *patchbranch* from *source* into a bundle
    # and return (diff text, newest incoming revision id).
    from mercurial import hg, ui, localrepo, commands, bundlerepo
    UI = ui.ui()
    # NOTE(review): tempfile.mktemp is race-prone; mkstemp would be safer.
    bundle = tempfile.mktemp(dir="/var/tmp")
    cwd = os.getcwd()
    os.chdir(base)  # `base` is a module-level path -- TODO confirm
    try:
        repo0 = hg.repository(UI,base)
        repo0.ui.quiet=True
        repo0.ui.pushbuffer()
        commands.pull(repo0.ui, repo0, quiet=True)
        repo0.ui.popbuffer() # discard all pull output
        # find out what the head revision of the given branch is
        # NOTE(review): no command runs between pushbuffer and popbuffer,
        # so `head` is always '' and is never used afterwards.
        repo0.ui.pushbuffer()
        head = repo0.ui.popbuffer().strip()
        repo0.ui.pushbuffer()
        # incoming() returns nonzero when there is nothing to fetch.
        if commands.incoming(repo0.ui, repo0, source=source,
                             branch=[patchbranch], bundle=bundle,
                             force=False) != 0:
            raise ValueError, "Repository contains no changes"
        rhead = repo0.ui.popbuffer()
        if rhead:
            # output is a list of revisions, one per line. last line should be newest revision
            rhead = rhead.splitlines()[-1].split(':')[1]
        if rhead == lastrev:
            raise NotChanged
        # Diff against the bundle overlay so only incoming changes show.
        repo=bundlerepo.bundlerepository(UI, ".", bundle)
        repo.ui.pushbuffer()
        old = 'max(ancestors(branch("%s"))-outgoing("%s"))' % (patchbranch, base)
        commands.diff(repo.ui, repo, rev=[old, patchbranch])
        result = repo.ui.popbuffer()
    finally:
        # Always restore the cwd and remove the temporary bundle.
        os.chdir(cwd)
        if os.path.exists(bundle):
            os.unlink(bundle)
    return result, rhead
def _update_hg(path):
    # Pull the latest changes into the checkout at *path* and rebuild its
    # extension modules in place, logging hg output to yt_updater.log.
    from mercurial import hg, ui, commands
    f = open(os.path.join(path, "yt_updater.log"), "a")
    u = ui.ui()
    u.pushbuffer()  # capture hg output for the log file
    config_fn = os.path.join(path, ".hg", "hgrc")
    print "Reading configuration from ", config_fn
    u.readconfig(config_fn)
    repo = hg.repository(u, path)
    commands.pull(u, repo)
    f.write(u.popbuffer())
    f.write("\n\n")
    u.pushbuffer()
    commands.identify(u, repo)
    # A '+' in `hg identify` output marks uncommitted local changes;
    # refuse to rebuild over a dirty working copy.
    if "+" in u.popbuffer():
        print "Can't rebuild modules by myself."
        print "You will have to do this yourself. Here's a sample commands:"
        print
        print " $ cd %s" % (path)
        print " $ hg up"
        print " $ %s setup.py develop" % (sys.executable)
        sys.exit(1)
    f.write("Rebuilding modules\n\n")
    p = subprocess.Popen([sys.executable, "setup.py", "build_ext", "-i"],
                         cwd=path, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    # stderr is merged into stdout above, so `stderr` here is always None.
    stdout, stderr = p.communicate()
    f.write(stdout)
    f.write("\n\n")
    if p.returncode:
        print "BROKEN: See %s" % (os.path.join(path, "yt_updater.log"))
        sys.exit(1)
    f.write("Successful!\n")
def test_stupid_fallback_to_stupid_fullrevs(self):
    # NOTE(review): this test is disabled by the early return below; the
    # rest of the body never executes.
    return
    # Monkey-patch points: make patching fail so the stupid replay code
    # must fall back to fetching full revisions.
    to_patch = {
        'mercurial.patch.patchbackend': _patchbackend_raise,
        'stupid.diff_branchrev': stupid.diff_branchrev,
        'stupid.fetch_branchrev': stupid.fetch_branchrev,
    }
    expected_calls = {
        'mercurial.patch.patchbackend': 1,
        'stupid.diff_branchrev': 1,
        'stupid.fetch_branchrev': 1,
    }
    self.stupid = True
    repo, repo_path = self._loadupdate('single_rev.svndump')
    self.stupid = False
    # Passing stupid=True doesn't seem to be working - force it
    repo.ui.setconfig('hgsubversion', 'stupid', "true")
    state = repo[None].parents()
    calls, replaced = _monkey_patch(to_patch)
    try:
        self.add_svn_rev(repo_path, {'trunk/alpha': 'Changed'})
        commands.pull(self.repo.ui, repo, update=True)
        self.failIfEqual(state, repo[None].parents())
        self.assertTrue('tip' in repo[None].tags())
        self.assertEqual(expected_calls, calls)
    finally:
        _monkey_unpatch(replaced)
def pull(self, source=None, target=None): from mercurial import commands, hg, ui, error log.debug("Clone or update HG repository.") source = source or self.source target = target or self.target # Folders need to be manually created if not os.path.exists(target): os.makedirs(target) # Doesn't work with unicode type url = str(source) path = str(target) try: repo = hg.repository(ui.ui(), path) commands.pull(ui.ui(), repo, source=url) commands.update(ui.ui(), repo) log.debug("Mercurial: repository at " + url + " updated.") except error.RepoError, e: log.debug("Mercurial: " + str(e)) try: commands.clone(ui.ui(), url, path) log.debug("Mercurial: repository at " + url + " cloned.") except Exception, e: log.debug("Mercurial: " + str(e)) raise PullFromRepositoryException(unicode(e))
def test_onerevision_doupdate(self):
    """Pulling with update=True must advance the working directory."""
    repo = self._load_fixture_and_fetch('single_rev.svndump')
    old_parents = repo.parents()
    self._add_svn_rev({'trunk/alpha': 'Changed'})
    commands.pull(self.repo.ui, repo, update=True)
    # The checkout moved off the old parents and now sits at tip.
    self.failIfEqual(old_parents, repo.parents())
    self.assertTrue('tip' in repo[None].tags())
def _process_hg_repository(self, manager, repo, remote):
    """Run an `hg pull` inside the wrapped Mercurial repository."""
    from mercurial import commands
    repo_path = repo.path
    self.env.log.debug('BitbucketSync: Executing a pull inside "%s"',
                       repo_path)
    commands.pull(repo.ui, repo.repo)
def test_old_tag_map_rebuilds(self):
    """A stale (version-1) tag map must be rebuilt to version 2 on pull."""
    repo = self._load_fixture_and_fetch('tag_name_same_as_branch.svndump')
    tm = os.path.join(repo.path, 'svn', 'tagmap')
    # Write an old-format version marker.  Use a context manager so the
    # file is flushed and closed before the pull reads it back (the
    # original left both handles unclosed).
    with open(tm, 'w') as fp:
        fp.write('1\n')
    # force tags to load since it is lazily loaded when needed
    repo.svnmeta().tags
    commands.pull(repo.ui, repo)
    with open(tm) as fp:
        self.assertEqual(fp.read().splitlines()[0], '2')
def get_local_repo(self):
    """Return the local clone of the Review Board repository, creating it
    on first use and pulling the latest changes every time."""
    name = self.rbrepo.name
    if not os.path.exists(name):
        commands.clone(self.ui, str(self.rbrepo.path), str(name))
    local = hg.repository(self.ui, name)
    commands.pull(self.ui, local, self.rbrepo.path)
    return local
def update(self, branch=None):
    """
    Update the local repository for recent changes.
    """
    # Fall back to the instance's default branch when none is given.
    target = self.branch if branch is None else branch
    commands.pull(ui.ui(), self._repository, self.url)
    commands.update(ui.ui(), self._repository, None, target, True)
def update(self, branch=None): """ Update the local repository for recent changes. """ if branch is None: branch = self.branch print "*** Updating to branch '%s'" % branch commands.pull(ui.ui(), self._repository, self.url) commands.update(ui.ui(), self._repository, None, branch, True)
def test_skip_delete_restore(self):
    """Skipping revisions 3-4 via unsafeskip leaves one converted rev
    that still verifies cleanly."""
    repo, repo_path = self._loadupdate('delete_restore_trunk.svndump',
                                       rev=2)
    repo.ui.setconfig('hgsubversion', 'unsafeskip', '3 4')
    commands.pull(repo.ui, repo)
    tiprev = repo['tip'].rev()
    self.assertEqual(tiprev, 1)
    self.assertEquals(verify.verify(repo.ui, repo, rev=tiprev), 0)
def update(self): self._send_callback(self.callback_on_action_notify,_('Updating repository %s') % self._remote_path) try: self.cleanup() commands.pull(self.repo.ui, self.repo, rev=None, force=False, update=True) commands.update(self.repo.ui, self.repo, self.branch) self._process_files() except RepoError, e: raise BrowserException, e
def hgDownload(path, url):
    """Clone *url* into *path* when absent (returns True); otherwise pull
    into the existing repository (returns False)."""
    try:
        local = hg.repository(mercurial_ui, path)
    except mercurial.error.RepoError:
        # No repository there yet -- create one from scratch.
        commands.clone(mercurial_ui, url, path)
        return True
    commands.pull(mercurial_ui, local, url)
    return False
def update(self, stdout=None):
    """
    Pull and update all changes from hg repository

    Note that this command destroy all local changes
    """
    hgui, hgrepo = self._hg(stdout)
    commands.pull(hgui, hgrepo)
    # clean=True discards any local modifications.
    commands.update(hgui, hgrepo, clean=True)
    del hgui, hgrepo
def handlePushes(repo_id, submits, do_update=True):
    # Record a batch of pushes for repository *repo_id*: sync the local
    # clone, then create Push rows (with their changesets) per submission.
    if not submits:
        return
    repo = Repository.objects.get(id=repo_id)
    # Flatten the per-push changeset lists into one list of revisions.
    revisions = reduce(lambda r, l: r + l,
                       [p.changesets for p in submits],
                       [])
    ui = _ui()
    repopath = os.path.join(settings.REPOSITORY_BASE, repo.name)
    configpath = os.path.join(repopath, '.hg', 'hgrc')
    if not os.path.isfile(configpath):
        # First sync: clone and record an ssh default-push path
        # (the scheme prefix of the URL is swapped for 'ssh').
        if not os.path.isdir(os.path.dirname(repopath)):
            os.makedirs(os.path.dirname(repopath))
        clone(ui, str(repo.url), str(repopath),
              pull=False, uncompressed=False, rev=[],
              noupdate=False)
        cfg = open(configpath, 'a')
        cfg.write('default-push = ssh%s\n' % str(repo.url)[4:])
        cfg.close()
        ui.readconfig(configpath)
        hgrepo = repository(ui, repopath)
    else:
        ui.readconfig(configpath)
        hgrepo = repository(ui, repopath)
    cs = submits[-1].changesets[-1]
    try:
        hgrepo.changectx(cs)
    except RepoError:
        # Newest submitted changeset is unknown locally: pull it in.
        pull(ui, hgrepo, source=str(repo.url),
             force=False, update=False,
             rev=[])
        if do_update:
            update(ui, hgrepo)
    for data in submits:
        changesets = []
        for revision in data.changesets:
            try:
                cs = getChangeset(repo, hgrepo, revision)
                transaction.commit()
                changesets.append(cs)
            except Exception, e:
                transaction.rollback()
                raise
                # NOTE(review): unreachable -- the bare raise above exits
                # the except block before this print can execute.
                print repo.name, e
        p = Push.objects.create(repository=repo, push_id=data.id,
                                user=data.user,
                                push_date=datetime.utcfromtimestamp(data.date))
        p.changesets = changesets
        p.save()
        # Commit each push as its own transaction.
        transaction.commit()
def test_skip_basic(self):
    """Revisions listed in hgsubversion.unsafeskip are not converted."""
    repo, repo_path = self._loadupdate('single_rev.svndump')
    # Three upstream commits; the last two (r3, r4) will be skipped.
    for svn_path, contents in (('trunk/alpha', 'Changed'),
                               ('trunk/beta', 'More changed'),
                               ('trunk/gamma', 'Even more changeder')):
        self.add_svn_rev(repo_path, {svn_path: contents})
    repo.ui.setconfig('hgsubversion', 'unsafeskip', '3 4')
    commands.pull(repo.ui, repo)
    tiprev = repo['tip'].rev()
    self.assertEqual(tiprev, 1)
    self.assertEquals(verify.verify(repo.ui, repo, rev=tiprev), 1)
def pull(self, url): """ Tries to pull changes from external location. """ url = self._get_url(url) try: pull(self.baseui, self._repo, url) except Abort, err: # Propagate error but with vcs's type raise RepositoryError(str(err))
def pop_queue(request, queue_name):
    # Pop one message from *queue_name* and act on it: clone the repo for
    # 'repoclone' messages, pull/update (and re-measure disk usage) for
    # 'repoupdate' messages.  Returns an HTTP response describing the result.
    # test count with
    # curl -i http://localhost:8000/q/default/
    # curl -i http://localhost:8000/q/default/json/
    # print "GET queue_name is %s" % queue_name
    q = None
    # pre-emptive queue name checking...
    try:
        q = Queue.objects.get(name=queue_name)
    except Queue.DoesNotExist:
        return HttpResponseNotFound()
    #
    msg = q.message_set.pop()
    response_message='void'
    if msg:
        u = ui.ui()
        message = json_encode(msg.message)
        project = Project.projects.get(project_id__exact = message['local_parent_project'])
        repo = Repo.objects.get(directory_name__exact=message['directory_name'],
                                local_parent_project__exact=project)
        if (queue_name == 'repoclone'):
            try:
                hg.clone(u, str(repo.default_path), repo.repo_directory, True)
                repo.created = True
            # NOTE(review): bare except swallows everything, including
            # KeyboardInterrupt/SystemExit; prefer `except Exception:`.
            except:
                response_message = 'failed'
            try:
                m = Message.objects.get(id=msg.id, queue=q.id)
                m.delete()
                repo.save()
                project.save()
                response_message = 'success'
            # NOTE(review): this can overwrite an earlier 'failed' from the
            # clone attempt with 'success' -- verify that is intended.
            except:
                response_message = 'failed'
        elif (queue_name == 'repoupdate'):
            location = hg.repository(u, repo.repo_directory)
            try:
                commands.pull(u, location, str(repo.default_path),
                              rev=['tip'], force=True, update=True)
                # Recompute the on-disk size of the working copy.
                repo.folder_size = 0
                for (path, dirs, files) in os.walk(repo.repo_directory):
                    for file in files:
                        filename = os.path.join(path, file)
                        repo.folder_size += os.path.getsize(filename)
                repo.save()
                m = Message.objects.get(id=msg.id, queue=q.id)
                m.delete()
                project.save()
                response_message = 'success'
            except:
                response_message = 'failed'
    if (response_message == 'failed'):
        return HttpResponseServerError()
    else:
        return HttpResponse(response_message)
def handlePushes(repo_id, submits, do_update=True):
    # Record a batch of pushes for repository *repo_id*: sync the local
    # clone, then create Push rows (with their changesets) per submission.
    if not submits:
        return
    repo = Repository.objects.get(id=repo_id)
    # Flatten the per-push changeset lists into one list of revisions.
    revisions = reduce(lambda r,l: r+l,
                       [p.changesets for p in submits],
                       [])
    ui = _ui()
    repopath = os.path.join(settings.REPOSITORY_BASE, repo.name)
    configpath = os.path.join(repopath, '.hg', 'hgrc')
    if not os.path.isfile(configpath):
        # First sync: clone and record an ssh default-push path
        # (the scheme prefix of the URL is swapped for 'ssh').
        if not os.path.isdir(os.path.dirname(repopath)):
            os.makedirs(os.path.dirname(repopath))
        clone(ui, str(repo.url), str(repopath),
              pull=False, uncompressed=False, rev=[],
              noupdate=False)
        cfg = open(configpath, 'a')
        cfg.write('default-push = ssh%s\n' % str(repo.url)[4:])
        cfg.close()
        ui.readconfig(configpath)
        hgrepo = repository(ui, repopath)
    else:
        ui.readconfig(configpath)
        hgrepo = repository(ui, repopath)
    cs = submits[-1].changesets[-1]
    try:
        hgrepo.changectx(cs)
    except RepoError:
        # Newest submitted changeset is unknown locally: pull it in.
        pull(ui, hgrepo, source = str(repo.url),
             force=False, update=False,
             rev=[])
        if do_update:
            update(ui, hgrepo)
    for data in submits:
        changesets = []
        for revision in data.changesets:
            try:
                cs = getChangeset(repo, hgrepo, revision)
                transaction.commit()
                changesets.append(cs)
            except Exception, e:
                transaction.rollback()
                raise
                # NOTE(review): unreachable -- the bare raise above exits
                # the except block before this print can execute.
                print repo.name, e
        p = Push.objects.create(repository = repo, push_id = data.id,
                                user = data.user,
                                push_date =
                                datetime.utcfromtimestamp(data.date))
        p.changesets = changesets
        p.save()
        # Commit each push as its own transaction.
        transaction.commit()
def update(self):
    """
    Pull updates from the upstream repository.

    If ``newest`` is set to False in the recipe or in the buildout
    configuration, no action is taken.
    """
    if not self.newest:
        return
    self.logger.info("Pulling repository %s and updating %s" % (
        self.repository, self.directory
    ))
    local = hg.repository(ui.ui(), self.directory)
    commands.pull(ui.ui(), local, self.repository, update=True)
def test_onerevision_divergent(self):
    """A pull that would cross divergent heads must not move the checkout."""
    repo = self._load_fixture_and_fetch('single_rev.svndump')
    # Make a local edit, then a conflicting one upstream.
    self.commitchanges((('alpha', 'alpha', 'Changed another way'),))
    parents_before = repo.parents()
    self._add_svn_rev({'trunk/alpha': 'Changed one way'})
    try:
        commands.pull(self.repo.ui, repo, update=True)
    except hgutil.Abort:
        # hg < 1.9 raised when crossing branches
        pass
    self.assertEqual(parents_before, repo.parents())
    self.assertTrue('tip' not in repo[None].tags())
    self.assertEqual(len(repo.heads()), 2)
def _clone_or_pull(ui, repo, name, source, pull_opts):
    """Ensure a cache repo for *name* exists under subtree-cache, pull
    *source* into it, and return the cache path."""
    cache_root = os.path.join(repo.path, 'subtree-cache')
    if not os.path.exists(cache_root):
        os.makedirs(cache_root)
    subrepo_cache = os.path.join(cache_root, name)
    if not os.path.exists(subrepo_cache):
        # First use: create and initialise an empty cache repository.
        os.makedirs(subrepo_cache)
        ui.status("initializing clean cache repo for %s in %s\n"
                  % (name, subrepo_cache))
        commands.init(ui, subrepo_cache)
    cache_repo = hg.repository(ui, path=subrepo_cache)
    commands.pull(ui, cache_repo, source=source, **pull_opts)
    return subrepo_cache
def checkout_hg(self): return 'hg', 'st' # pull new version of project from perository if not self.repo_path: # may be need to find repo recursively from this dir to up, but it's only may be. self.repo_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..',)) repo = hg.repository( ui.ui(), self.repo_path ) url = dict(repo.ui.configitems('paths', 'default'))['default'] commands.pull(ui.ui(), repo, url) # and update it commands.update(ui.ui(), repo) return
def hg_pull(source, revision, update, verbose): print 'update repo revision %s at %s' % (revision, source,) if has_mercurial: u = ui.ui() repo = hg.repository(u, '.') rev = [revision,] if revision else None commands.pull(u, repo, source = source, rev = rev, update = update, verbose = verbose) else: if revision: cmd = 'hg pull -u -r%s %s' % (revision, source) else: cmd = 'hg pull -u %s' % (source) print cmd os.system(cmd) print 'updated repo at %s' % (source,)
def pullAndMerge(self):
    """Run an hg pull and update. Overwrite all local changes by default.
    If anything goes wrong with the pull or update, clone instead.
    """
    try:
        self.chmod()
        commands.pull(self.ui, self.repo, source=self.url)
        self.chmod()
        commands.update(self.ui, self.repo, clean=True)
    except error.RepoError:
        # Local repo is unusable -- wipe it and re-clone from scratch.
        if os.path.exists(REPO_DIR):
            shutil.rmtree(REPO_DIR)
        self.clone()
    return
def override_pull(orig, ui, repo, source=None, **opts):
    # Wrapper for `hg pull` adding --rebase support: pull with the
    # post-incoming hook suppressed, then rebase onto any new changesets.
    if opts.get('rebase', False):
        repo._isrebasing = True
        try:
            if opts.get('update'):
                # --update is superseded by --rebase.
                del opts['update']
                ui.debug('--update and --rebase are not compatible, ignoring '
                         'the update flag\n')
            del opts['rebase']
            cmdutil.bailifchanged(repo)
            revsprepull = len(repo)
            origpostincoming = commands.postincoming
            def _dummy(*args, **kwargs):
                pass
            # Silence the normal post-pull update; rebase replaces it.
            commands.postincoming = _dummy
            repo.lfpullsource = source
            if not source:
                source = 'default'
            try:
                result = commands.pull(ui, repo, source, **opts)
            finally:
                # Always restore the hook, even if the pull raised.
                commands.postincoming = origpostincoming
            revspostpull = len(repo)
            if revspostpull > revsprepull:
                # New changesets arrived: rebase onto them.
                result = result or rebase.rebase(ui, repo)
        finally:
            repo._isrebasing = False
    else:
        repo.lfpullsource = source
        if not source:
            source = 'default'
        result = orig(ui, repo, source, **opts)
    return result
def cache_cmd(ui, source=None, **opts):
    """Manage the local clone cache.

    With SOURCE, clone it (noupdate) into the cache directory.  With
    --update, pull every cached repository -- or only SOURCE's cache
    entry when both are given.  Aborts when neither SOURCE nor --update
    is supplied, or when SOURCE is a local repository.
    """
    if source is None and not opts.get('update'):
        raise hg.util.Abort(_("either SOURCE or --update is required"))
    # (removed a stray debug `print source` that wrote to stdout)
    if opts.get('update'):
        for repo_d in os.listdir(CACHE):
            if source is None or repo_d == url_to_filename(source):
                ui.status('updating cache {}\n'.format(repo_d))
                cache_peer = hg.peer(ui, {}, os.path.join(CACHE, repo_d))
                commands.pull(cache_peer.ui, cache_peer.local(),
                              noupdate=True)
    else:
        if hg.islocal(source):
            raise hg.util.Abort(_("not caching local repo {}".format(source)))
        cache_d = os.path.join(CACHE, url_to_filename(source))
        # Translate the template, then format, so gettext sees a stable
        # msgid (the original formatted before translating).
        ui.status(_('caching {} to {}\n').format(source, cache_d))
        commands.clone(ui, source, cache_d, noupdate=True)
def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
             hooks=True):
    """Pull *source* into the wire-described repository, forwarding only
    the selector options the caller actually supplied."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'], hooks=hooks)
    # Mercurial internally has a lot of logic that checks ONLY whether an
    # option is defined, so pass each selector only when it is set.
    opts = {}
    for key, value in (('bookmark', bookmark),
                       ('branch', branch),
                       ('rev', revision)):
        if value:
            opts[key] = value
    commands.pull(baseui, repo, source, **opts)
def _upgrade(ui, repo):
    # Self-update the Kiln extensions: pull the newest extension files
    # from kilnhg into the extension's own install directory.
    ext_dir = os.path.dirname(os.path.abspath(__file__))
    ui.debug('kiln: checking for extensions upgrade for %s\n' % ext_dir)
    try:
        r = localrepo.localrepository(hgui.ui(), ext_dir)
    except RepoError:
        # The install dir is not a repo yet; make it one so we can pull.
        commands.init(hgui.ui(), dest=ext_dir)
        r = localrepo.localrepository(hgui.ui(), ext_dir)
    r.ui.setconfig('kiln', 'autoupdate', False)
    r.ui.pushbuffer()  # hide pull/update chatter from the user
    try:
        source = 'https://developers.kilnhg.com/Repo/Kiln/Group/Kiln-Extensions'
        if commands.incoming(r.ui, r, bundle=None, force=False,
                             source=source) != 0:
            # no incoming changesets, or an error. Don't try to upgrade.
            ui.debug('kiln: no extensions upgrade available\n')
            return
        ui.write(_('updating Kiln Extensions at %s... ') % ext_dir)
        # pull and update return falsy values on success
        if commands.pull(r.ui, r, source=source) or \
           commands.update(r.ui, r, clean=True):
            # Point the user at the manual download page on failure.
            url = urljoin(repo.url()[:repo.url().lower().index('/repo')],
                          'Tools')
            ui.write(_('unable to update\nvisit %s to download the newest extensions\n') % url)
        else:
            ui.write(_('complete\n'))
    except Exception, e:
        ui.debug(_('kiln: error updating Kiln Extensions: %s\n') % e)
def _upgrade(ui, repo):
    # Self-update the Kiln extensions: pull the newest extension files
    # from kilnhg into the extension's own install directory.
    ext_dir = os.path.dirname(os.path.abspath(__file__))
    ui.debug(_('kiln: checking for extensions upgrade for %s\n') % ext_dir)
    try:
        r = localrepo.localrepository(hgui.ui(), ext_dir)
    except RepoError:
        # The install dir is not a repo yet; make it one so we can pull.
        commands.init(hgui.ui(), dest=ext_dir)
        r = localrepo.localrepository(hgui.ui(), ext_dir)
    r.ui.setconfig('kiln', 'autoupdate', False)
    r.ui.pushbuffer()  # hide pull/update chatter from the user
    try:
        source = 'https://developers.kilnhg.com/Repo/Kiln/Group/Kiln-Extensions'
        if commands.incoming(r.ui, r, bundle=None, force=False,
                             source=source) != 0:
            # no incoming changesets, or an error. Don't try to upgrade.
            ui.debug('kiln: no extensions upgrade available\n')
            return
        ui.write(_('updating Kiln Extensions at %s... ') % ext_dir)
        # pull and update return falsy values on success
        if commands.pull(r.ui, r, source=source) or \
           commands.update(r.ui, r, clean=True):
            # Point the user at the manual download page on failure.
            url = urljoin(repo.url()[:repo.url().lower().index('/repo')],
                          'Tools')
            ui.write(_('unable to update\nvisit %s to download the newest extensions\n') % url)
        else:
            ui.write(_('complete\n'))
    except Exception, e:
        ui.debug(_('kiln: error updating extensions: %s\n') % e)
        ui.debug(_('kiln: traceback: %s\n') % traceback.format_exc())
def _hg_repository_sync(name, url, submits, do_update=True):
    # Clone-or-pull repository *name* from *url* and return the hg repo.
    ui_ = ui()
    repopath = os.path.join(settings.REPOSITORY_BASE, name)
    configpath = os.path.join(repopath, '.hg', 'hgrc')
    if not os.path.isfile(configpath):
        # First sync: clone and record an ssh default-push path
        # (the scheme prefix of the URL is swapped for 'ssh').
        if not os.path.isdir(os.path.dirname(repopath)):
            os.makedirs(os.path.dirname(repopath))
        clone(ui_, str(url), str(repopath),
              pull=False, uncompressed=False, rev=[],
              noupdate=False)
        cfg = open(configpath, 'a')
        cfg.write('default-push = ssh%s\n' % str(url)[4:])
        cfg.close()
        ui_.readconfig(configpath)
        hgrepo = repository(ui_, repopath)
    else:
        ui_.readconfig(configpath)
        hgrepo = repository(ui_, repopath)
    cs = submits[-1].changesets[-1]
    try:
        hgrepo.changectx(cs)
    except RepoError:
        # Newest submitted changeset is unknown locally: pull it in.
        pull(ui_, hgrepo, source=str(url),
             force=False, update=False,
             rev=[])
        if do_update:
            # Make sure that we're not triggering workers in post 2.6
            # hg. That's not stable, at least as we do it.
            # Monkey patch time
            try:
                from mercurial import worker
                if hasattr(worker, '_startupcost'):
                    # use same value as hg for non-posix
                    worker._startupcost = 1e30
            except ImportError:
                # no worker, no problem
                pass
            update(ui_, hgrepo)
    return hgrepo
def test_most_recent_is_edited(self, stupid=False):
    """rebuildmeta on a clone must reproduce the source repo's tags/heads."""
    repo = self._load_fixture_and_fetch('most-recent-is-edit-tag.svndump',
                                        stupid=stupid)
    self.repo.ui.status(
        "Note: this test failing may be because of a rebuildmeta failure.\n"
        "You should check that before assuming issues with this test.\n")
    wc2_path = self.wc_path + '2'
    src, dest = hg.clone(repo.ui, self.wc_path, wc2_path, update=False)
    svncommands.rebuildmeta(repo.ui, dest,
                            args=[test_util.fileurl(self.repo_path), ])
    commands.pull(self.repo.ui, self.repo, stupid=stupid)
    dtags, srctags = dest.tags(), self.repo.tags()
    # 'tip' always differs between the clones; drop it before comparing.
    for tagmap in (dtags, srctags):
        tagmap.pop('tip')
    self.assertEqual(dtags, srctags)
    self.assertEqual(dest.heads(), self.repo.heads())
def update(self):
    """
    This method is run when a buildout environment should be updated.
    If the ``newest`` option is set, this will cause a pull from the
    upstream repository.
    """
    if self.rev is not None or not self.newest:
        # "newest" is also automatically disabled if "offline" is set.
        self.log.info("Pulling is disabled for this part")
        return
    self.log.info("Pulling repository %s and updating %s" % (
        self.source, self.destination
    ))
    commands.pull(ui.ui(), get_repository(self.destination),
                  self.source, update = True)
    if self.as_egg:
        self._install_as_egg()
def kpull(ui, repo, bookmark=None):
    """Pull the changes from the specified remote bookmark into the
    local repository.
    """
    # The workflow requires a bookmark-free local repo.
    if bookmarks.listbookmarks(repo):
        raise util.Abort("local repo must not have any bookmarks")
    if bookmark is None:
        bookmark = _read_bookmark(repo)
    return commands.pull(ui, repo, source='default', rev=[bookmark])
def _process_repository(self, name):
    """Pull the Trac-registered Mercurial repository *name* from its
    configured default source."""
    if not name:
        return
    trac_repo = RepositoryManager(self.env).get_repository(name)
    if not trac_repo or not hasattr(trac_repo, 'path'):
        return
    # Open with a global UI, then reuse the repo-scoped UI so the
    # repository's own configuration (e.g. paths.default) is honoured.
    hgrepo = hg.repository(mercurial_ui.ui(), trac_repo.path)
    commands.pull(hgrepo.ui, hgrepo)
def test_updatemetahook(self):
    """The changegroup.meta hook must invoke svncommands.updatemeta."""
    repo, repo_path = self._loadupdate('single_rev.svndump')
    state = repo[None].parents()
    self.add_svn_rev(repo_path, {'trunk/alpha': 'Changed'})
    commands.pull(self.repo.ui, self.repo)
    # Clone to a new repository and add a hook
    new_wc_path = "%s-2" % self.wc_path
    commands.clone(self.repo.ui, self.wc_path, new_wc_path)
    newrepo = hg.repository(test_util.testui(), new_wc_path)
    newrepo.ui.setconfig('hooks', 'changegroup.meta',
                         'python:hgsubversion.hooks.updatemeta.hook')
    # Commit a rev that should trigger svn meta update
    self.add_svn_rev(repo_path, {'trunk/alpha': 'Changed Again'})
    commands.pull(self.repo.ui, self.repo)
    # Replace updatemeta with a spy that records being called.
    self.called = False
    import hgsubversion.svncommands
    oldupdatemeta = hgsubversion.svncommands.updatemeta
    def _spy(ui, repo, args=[]):
        self.called = True
    hgsubversion.svncommands.updatemeta = _spy
    # Pull and make sure our updatemeta function gets called
    commands.pull(newrepo.ui, newrepo)
    hgsubversion.svncommands.updatemeta = oldupdatemeta
    self.assertTrue(self.called)
def test_updatemetahook(self):
    """The changegroup.meta hook must invoke svncommands.updatemeta."""
    repo, repo_path = self._loadupdate('single_rev.svndump')
    state = repo.parents()
    self.add_svn_rev(repo_path, {'trunk/alpha': 'Changed'})
    commands.pull(self.repo.ui, self.repo)
    # Clone to a new repository and add a hook
    new_wc_path = "%s-2" % self.wc_path
    commands.clone(self.repo.ui, self.wc_path, new_wc_path)
    newrepo = hg.repository(test_util.testui(), new_wc_path)
    newrepo.ui.setconfig('hooks', 'changegroup.meta',
                         'python:hgsubversion.hooks.updatemeta.hook')
    # Commit a rev that should trigger svn meta update
    self.add_svn_rev(repo_path, {'trunk/alpha': 'Changed Again'})
    commands.pull(self.repo.ui, self.repo)
    # Replace updatemeta with a spy that records being called.
    self.called = False
    import hgsubversion.svncommands
    oldupdatemeta = hgsubversion.svncommands.updatemeta
    def _spy(ui, repo, args=[]):
        self.called = True
    hgsubversion.svncommands.updatemeta = _spy
    # Pull and make sure our updatemeta function gets called
    commands.pull(newrepo.ui, newrepo)
    hgsubversion.svncommands.updatemeta = oldupdatemeta
    self.assertTrue(self.called)
def download_patch(source, lastrev, patchbranch):
    # Fetch incoming changes for *patchbranch* from *source* into a bundle
    # and return (diff text, newest incoming revision id).
    from mercurial import hg, ui, localrepo, commands, bundlerepo
    UI = ui.ui()
    # NOTE(review): tempfile.mktemp is race-prone; mkstemp would be safer.
    bundle = tempfile.mktemp(dir="/var/tmp")
    cwd = os.getcwd()
    os.chdir(base)  # `base` is a module-level path -- TODO confirm
    try:
        repo0 = hg.repository(UI, base)
        repo0.ui.quiet = True
        repo0.ui.pushbuffer()
        commands.pull(repo0.ui, repo0, quiet=True)
        repo0.ui.popbuffer() # discard all pull output
        # find out what the head revision of the given branch is
        # NOTE(review): no command runs between pushbuffer and popbuffer,
        # so `head` is always '' and is never used afterwards.
        repo0.ui.pushbuffer()
        head = repo0.ui.popbuffer().strip()
        repo0.ui.pushbuffer()
        # incoming() returns nonzero when there is nothing to fetch.
        if commands.incoming(repo0.ui, repo0, source=source,
                             branch=[patchbranch], bundle=bundle,
                             force=False) != 0:
            raise ValueError, "Repository contains no changes"
        rhead = repo0.ui.popbuffer()
        if rhead:
            # output is a list of revisions, one per line. last line should be newest revision
            rhead = rhead.splitlines()[-1].split(':')[1]
        if rhead == lastrev:
            raise NotChanged
        # Diff against the bundle overlay so only incoming changes show.
        repo = bundlerepo.bundlerepository(UI, ".", bundle)
        repo.ui.pushbuffer()
        old = 'max(ancestors(branch("%s"))-outgoing("%s"))' % (patchbranch, base)
        commands.diff(repo.ui, repo, rev=[old, patchbranch])
        result = repo.ui.popbuffer()
    finally:
        # Always restore the cwd and remove the temporary bundle.
        os.chdir(cwd)
        if os.path.exists(bundle):
            os.unlink(bundle)
    return result, rhead
def test_most_recent_is_edited(self):
    """rebuildmeta on a fresh clone must match the source's tags/heads."""
    repo, repo_path = self.load_and_fetch(
        'most-recent-is-edit-tag.svndump')
    self.repo.ui.status(
        "Note: this test failing may be because of a rebuildmeta failure.\n"
        "You should check that before assuming issues with this test.\n")
    wc2_path = self.wc_path + '2'
    src, dest = test_util.hgclone(repo.ui, self.wc_path, wc2_path,
                                  update=False)
    dest = test_util.getlocalpeer(dest)
    svncommands.rebuildmeta(repo.ui, dest,
                            args=[test_util.fileurl(repo_path), ])
    commands.pull(self.repo.ui, self.repo)
    dtags, srctags = dest.tags(), self.repo.tags()
    # 'tip' always differs between the clones; drop it before comparing.
    for tagmap in (dtags, srctags):
        tagmap.pop('tip')
    self.assertEqual(dtags, srctags)
    self.assertEqual(dest.heads(), self.repo.heads())
def tfreshen(ui, repo, *args, **opts):
    """ Pull recent changes from prod and merge them into the current branch. """
    mustBeTopicRepo(repo)

    # Sanity checks
    if not(onTopicBranch(ui, repo) and isClean(ui, repo)):
        return 1
    topicBranch = repo.dirstate.branch()

    commitStop = False # note if we get to that step

    # Pull new changes from the central repo
    # (tryCommand presumably returns truthy on failure -- see helper;
    # pull() >= 2 appears to be the failure condition. TODO confirm.)
    if not opts.get('nopull', False):
        if tryCommand(ui, "pull", lambda:commands.pull(ui, repo, **opts) >= 2):
            return 1

    # Are there any changes to merge?
    workDir = repo[None].parents()[0]
    prodTip = repo[repo.branchheads(repo.topicProdBranch)[0]]
    ancestor = prodTip.ancestor(workDir)
    #print("prodTip:" + str(prodTip) + ", workDir:" + str(workDir) + ", ancestor:" + str(ancestor))
    # Fresh when prod's tip is already an ancestor of the working dir, or
    # the topic branch is a parent of prod's tip (already merged).
    if prodTip.node() == ancestor.node() or topicBranch in [p.branch() for p in prodTip.parents()]:
        ui.status(" (%s is still fresh)\n" % topicBranch)
        return 0
    else:
        ui.status(" (will freshen %s from %s)\n" % (topicBranch, repo.topicProdBranch))

    # Merge it.
    if doMerge(ui, repo, repo.topicProdBranch):
        return 1

    # Stop if requested.
    if opts.get('nocommit', False):
        ui.status("\nStopping before commit as requested.\n")
        commitStop = True
        return 0

    # Unlike a normal hg commit, if no text is specified we supply a reasonable default.
    text = opts.get('message')
    if text is None:
        text = "Merge recent changes from %s" % repo.topicProdBranch

    # Commit the merge.
    if tryCommand(ui, "commit", lambda:repo.commit(text) is None):
        return 1

    if not opts.get('terse', False):
        ui.status("Done.\n")
def override_pull(orig, ui, repo, source=None, **opts):
    # largefiles wrapper for `hg pull`: adds --rebase handling, and on a
    # plain pull caches largefiles for any heads that just arrived.
    if opts.get('rebase', False):
        repo._isrebasing = True
        try:
            if opts.get('update'):
                # --update is superseded by --rebase.
                del opts['update']
                ui.debug('--update and --rebase are not compatible, ignoring '
                         'the update flag\n')
            del opts['rebase']
            cmdutil.bailifchanged(repo)
            revsprepull = len(repo)
            origpostincoming = commands.postincoming
            def _dummy(*args, **kwargs):
                pass
            # Silence the normal post-pull update; rebase replaces it.
            commands.postincoming = _dummy
            repo.lfpullsource = source
            if not source:
                source = 'default'
            try:
                result = commands.pull(ui, repo, source, **opts)
            finally:
                commands.postincoming = origpostincoming
            revspostpull = len(repo)
            if revspostpull > revsprepull:
                result = result or rebase.rebase(ui, repo)
        finally:
            repo._isrebasing = False
    else:
        repo.lfpullsource = source
        if not source:
            source = 'default'
        oldheads = lfutil.getcurrentheads(repo)
        result = orig(ui, repo, source, **opts)
        # If we do not have the new largefiles for any new heads we pulled, we
        # will run into a problem later if we try to merge or rebase with one of
        # these heads, so cache the largefiles now direclty into the system
        # cache.
        ui.status(_("caching new largefiles\n"))
        numcached = 0
        heads = lfutil.getcurrentheads(repo)
        newheads = set(heads).difference(set(oldheads))
        for head in newheads:
            (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
            numcached += len(cached)
        ui.status(_("%d largefiles cached\n") % numcached)
    return result
def overridepull(orig, ui, repo, source=None, **opts):
    # largefiles wrapper for `hg pull`: handles --rebase and, via --lfrev /
    # --all-largefiles, caches largefiles for the revisions just pulled.
    revsprepull = len(repo)
    if not source:
        source = 'default'
    repo.lfpullsource = source
    if opts.get('rebase', False):
        repo._isrebasing = True
        try:
            if opts.get('update'):
                # --update is superseded by --rebase.
                del opts['update']
                ui.debug('--update and --rebase are not compatible, ignoring '
                         'the update flag\n')
            del opts['rebase']
            cmdutil.bailifchanged(repo)
            origpostincoming = commands.postincoming
            def _dummy(*args, **kwargs):
                pass
            # Silence the normal post-pull update; rebase replaces it.
            commands.postincoming = _dummy
            try:
                result = commands.pull(ui, repo, source, **opts)
            finally:
                commands.postincoming = origpostincoming
            revspostpull = len(repo)
            if revspostpull > revsprepull:
                result = result or rebase.rebase(ui, repo)
        finally:
            repo._isrebasing = False
    else:
        result = orig(ui, repo, source, **opts)
    revspostpull = len(repo)
    lfrevs = opts.get('lfrev', [])
    if opts.get('all_largefiles'):
        lfrevs.append('pulled()')
    if lfrevs and revspostpull > revsprepull:
        numcached = 0
        repo.firstpulled = revsprepull # for pulled() revset expression
        try:
            for rev in scmutil.revrange(repo, lfrevs):
                ui.note(_('pulling largefiles for revision %s\n') % rev)
                (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
                numcached += len(cached)
        finally:
            # Always remove the temporary attribute used by pulled().
            del repo.firstpulled
        ui.status(_("%d largefiles cached\n") % numcached)
    return result
def override_pull(orig, ui, repo, source=None, **opts):
    # largefiles wrapper for `hg pull`: adds --rebase handling, and on a
    # plain pull caches largefiles for any heads that just arrived.
    if opts.get("rebase", False):
        repo._isrebasing = True
        try:
            if opts.get("update"):
                # --update is superseded by --rebase.
                del opts["update"]
                ui.debug("--update and --rebase are not compatible, ignoring "
                         "the update flag\n")
            del opts["rebase"]
            cmdutil.bailifchanged(repo)
            revsprepull = len(repo)
            origpostincoming = commands.postincoming
            def _dummy(*args, **kwargs):
                pass
            # Silence the normal post-pull update; rebase replaces it.
            commands.postincoming = _dummy
            repo.lfpullsource = source
            if not source:
                source = "default"
            try:
                result = commands.pull(ui, repo, source, **opts)
            finally:
                commands.postincoming = origpostincoming
            revspostpull = len(repo)
            if revspostpull > revsprepull:
                result = result or rebase.rebase(ui, repo)
        finally:
            repo._isrebasing = False
    else:
        repo.lfpullsource = source
        if not source:
            source = "default"
        oldheads = lfutil.getcurrentheads(repo)
        result = orig(ui, repo, source, **opts)
        # If we do not have the new largefiles for any new heads we pulled, we
        # will run into a problem later if we try to merge or rebase with one of
        # these heads, so cache the largefiles now direclty into the system
        # cache.
        ui.status(_("caching new largefiles\n"))
        numcached = 0
        heads = lfutil.getcurrentheads(repo)
        newheads = set(heads).difference(set(oldheads))
        for head in newheads:
            (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
            numcached += len(cached)
        ui.status(_("%d largefiles cached\n") % numcached)
    return result
def override_pull(orig, ui, repo, source=None, **opts):
    """largefiles wrapper for 'hg pull'.

    With --rebase: pulls without updating the working directory, then
    rebases. Otherwise: delegates to the wrapped pull and then caches
    largefiles for every branch head, so a later merge/rebase does not
    fail for lack of largefile blobs.
    """
    if opts.get('rebase', False):
        repo._isrebasing = True
        try:
            # --update is meaningless combined with --rebase; drop it.
            if opts.get('update'):
                del opts['update']
                ui.debug('--update and --rebase are not compatible, ignoring '
                         'the update flag\n')
            del opts['rebase']
            cmdutil.bailifchanged(repo)
            revsprepull = len(repo)
            # Temporarily disable postincoming so pull does not touch the
            # working directory before we rebase; restored in the finally.
            origpostincoming = commands.postincoming
            def _dummy(*args, **kwargs):
                pass
            commands.postincoming = _dummy
            repo.lfpullsource = source
            if not source:
                source = 'default'
            try:
                result = commands.pull(ui, repo, source, **opts)
            finally:
                commands.postincoming = origpostincoming
            revspostpull = len(repo)
            # Only rebase if the pull actually brought in new revisions.
            if revspostpull > revsprepull:
                result = result or rebase.rebase(ui, repo)
        finally:
            repo._isrebasing = False
    else:
        repo.lfpullsource = source
        if not source:
            source = 'default'
        result = orig(ui, repo, source, **opts)
        # If we do not have the new largefiles for any new heads we pulled, we
        # will run into a problem later if we try to merge or rebase with one of
        # these heads, so cache the largefiles now direclty into the system
        # cache.
        ui.status(_("caching new largefiles\n"))
        numcached = 0
        branches = repo.branchmap()
        for branch in branches:
            heads = repo.branchheads(branch)
            for head in heads:
                (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
                numcached += len(cached)
        # BUG FIX: format AFTER the gettext lookup. The original did
        # _("%d largefiles cached\n" % numcached), which looks up the
        # already-formatted string in the translation catalog and can never
        # find it; sibling overrides use _("...") % numcached.
        ui.status(_("%d largefiles cached\n") % numcached)
    return result
def update(self, timeout=None):
    """Pull from the default source and update the working directory.

    :param timeout: optional value for the "ui.timeout" config knob,
        applied before running the commands.
    :returns: True if both pull and update reported success (returned a
        falsy status code), False otherwise.
    :raises IndexError: re-raised unchanged if pull/update raise it.
    """
    ok = True
    if timeout:
        self.ui.setconfig("ui", "timeout", timeout)
    try:
        # Mercurial commands return a truthy status code on failure.
        if commands.pull(self.ui, self.repo):
            ok = False
        if commands.update(self.ui, self.repo):
            ok = False
    except IndexError:
        # BUG FIX: the original fetched sys.exc_info()[1] (a Python 2.5
        # workaround for 'except ... as e'), re-raised it, and then had an
        # unreachable 'return False' after the raise. A bare raise
        # re-raises with the original traceback on every Python version.
        raise
    return ok
def topen(ui, repo, *args, **opts):
    """ open (create) a new topic branch

    Prompts for a name in menu mode ('tmenu' in opts), otherwise takes it
    from args. Pulls from the central repo (unless --nopull), validates
    the name against existing branches, updates to the head of the prod
    branch, then creates and commits the new branch. Returns 1 on any
    failure, otherwise the result of the final commit tryCommand.
    """
    mustBeTopicRepo(repo)

    # Check the arguments
    if 'tmenu' in opts:
        resp = ui.prompt("Name for new branch:", None)
        if not resp:
            return 1
        args = [resp]
    elif len(args) < 1:
        ui.warn("Error: You must specify a name for the new branch.\n")
        return 1

    # Pull new changes from the central repo
    # (pull result >= 2 is treated as failure by tryCommand)
    if not opts.get('nopull', False):
        if tryCommand(ui, "pull", lambda:commands.pull(ui, repo, **opts) >= 2):
            return 1

    # Validate the name: reject duplicates of any topic branch (open or
    # closed) and of the prod branch itself.
    target = args[0]
    if target in topicBranchNames(repo, closed=True) + [repo.topicProdBranch]:
        ui.warn("Error: a branch with that name already exists; try choosing a different name.\n")
        return 1

    # Make sure we're at the top of the prod branch
    # NOTE(review): checks branchheads('prod') literally while comparing the
    # branch name to repo.topicProdBranch — presumably these always agree;
    # confirm if topicProdBranch can ever differ from 'prod'.
    if repo.dirstate.parents()[0] not in repo.branchheads('prod') or \
       repo.dirstate.branch() != repo.topicProdBranch:
        if tryCommand(ui, "update %s" % quoteBranch(repo.topicProdBranch), \
                      lambda:commands.update(ui, repo, node=repo.topicProdBranch, check=True)):
            return 1

    # Create the new branch and commit it.
    if tryCommand(ui, 'branch %s' % target, lambda:commands.branch(ui, repo, target)):
        return 1
    text = "Opening branch %s" % quoteBranch(target)
    # repo.commit returns None on failure; tryCommand treats truthy as error.
    return tryCommand(ui, "commit", lambda:repo.commit(text) is None)
def tsync(ui, repo, *args, **opts):
    """ synchronize (pull & update) the current repo; also the topic extension itself. """
    mustBeTopicRepo(repo)

    # Pull and update the current repo
    # (pull result >= 2 is treated as failure by tryCommand)
    pullOpts = copy.deepcopy(opts)
    pullOpts['update'] = True
    if tryCommand(ui, "pull -u", lambda:commands.pull(ui, repo, **pullOpts) >= 2):
        return 1

    # Then pull and update the topic extension itself, shelling out to hg.
    # Compare topic.py's mtime before/after to detect whether it changed.
    topicDir = os.path.dirname(__file__)
    timeBefore = os.path.getmtime(os.path.join(topicDir, "topic.py"))
    # NOTE(review): the actual command quotes topicDir with plain "%s" while
    # the displayed command uses quoteBranch — a path containing '"' would
    # break the shell command; confirm paths are trusted here.
    if tryCommand(ui, "pull -R %s -u" % quoteBranch(topicDir), lambda:os.system('hg pull -R "%s" --quiet -u' % topicDir)):
        return 1
    timeAfter = os.path.getmtime(os.path.join(topicDir, "topic.py"))
    if timeBefore != timeAfter:
        ui.status("Note: Topic extension has been updated.\n")
        if 'tmenu' in opts:
            # In menu mode, re-exec the menu so the updated extension code
            # is picked up, then exit this (now stale) process.
            ui.status("...restarting menu.\n")
            os.system("hg tmenu")
            sys.exit(0)
def update(self):
    """Pull new changesets from self.url, then update the working copy."""
    ui_, repo_ = self.ui, self.repo
    commands.pull(ui_, repo_, self.url)
    commands.update(ui_, repo_)
def pull_repository(repo, repository_clone_url, ctx_rev):
    """Pull changes from a remote repository to a local one."""
    pull_ui = get_configured_ui()
    commands.pull(pull_ui, repo, source=repository_clone_url, rev=[ctx_rev])
def tclose(ui, repo, *args, **opts):
    """ close the current topic branch and push to the central repository

    For each named branch (or the current branch if none given): pull,
    commit a --close-branch changeset, merge the branch into the prod
    branch (reverting everything back to prod so the merge only serves to
    give the closed head a child), then push the pair of branches to every
    configured path (default/dev/stage/prod). Returns 1 on any failure.
    """
    mustBeTopicRepo(repo)

    # Sanity check: refuse to run with uncommitted changes.
    if not isClean(ui, repo):
        return 1

    if args:
        branches = args
    else:
        if not onTopicBranch(ui, repo):
            return 1
        branches = [repo.dirstate.branch()]

    if 'tmenu' in opts:
        if ui.prompt("Branch '%s': close it?" % branches[0]).upper() != 'Y':
            return 1
        # Menu mode supplies no command-line flags; synthesize defaults.
        opts = { 'nopull':False, 'nopush':False }

    pulled = False # only pull once

    for branch in branches:
        # Pull new changes from the central repo to avoid multiple-heads problem
        if not opts['nopull'] and not pulled:
            if tryCommand(ui, "pull", lambda:commands.pull(ui, repo, **opts) >= 2):
                return 1
            pulled = True

        # Can't close already closed branches, nor any of the special branches
        if not repo.branchheads(branch) or branch in repo.topicSpecialBranches:
            ui.warn("Error: %s is not an open topic branch\n" % branch)
            return 1

        # Now update to the head of the branch being closed
        if repo.dirstate.parents()[0] not in repo.branchheads(branch):
            if tryCommand(ui, "update %s" % quoteBranch(branch), lambda:commands.update(ui, repo, node=branch)):
                return 1

        # Unlike a normal hg commit, if no text is specified we supply a reasonable default.
        branch = repo.dirstate.branch()
        text = opts.get('message')
        if text is None:
            text = "Closing %s" % branch

        # Close it
        if tryCommand(ui, "commit --close-branch", lambda:repo.commit(text, extra = {'close':'True'}) is None):
            return 1

        # Aditionally, for this to not be considered a "head" it has to have a
        # child commit. So we have to merge into prod. First, update.
        #
        if tryCommand(ui, "update %s" % repo.topicProdBranch, lambda:commands.update(ui, repo, node=repo.topicProdBranch)):
            return 1

        # Now merge, ignoring all conflicts (internal:fail makes every
        # conflicted file fail rather than opening a merge tool).
        mergeOpts = copy.deepcopy(opts)
        mergeOpts['tool'] = "internal:fail"
        mergeOpts['noninteractive'] = True
        # Ignore return value... ok if merge fails
        tryCommand(ui, "merge -r %s" % quoteBranch(branch),
                   lambda:commands.merge(ui, repo, node=branch, **mergeOpts),
                   repo = repo)

        # Revert all files to prod (regardless of what happened on the branch)
        revertOpts = copy.deepcopy(opts)
        revertOpts['all'] = True
        revertOpts['rev'] = "."
        if tryCommand(ui, "revert -a -r .", lambda:commands.revert(ui, repo, **revertOpts), repo = repo):
            return 1

        # Were there any merge conflicts?
        resolveOpts = copy.deepcopy(opts)
        resolveOpts['list'] = True
        if tryCommand(ui, "resolve -l", lambda:commands.resolve(ui, repo, **resolveOpts), repo = repo):
            return 1

        # Anything that had a merge conflict, mark it resolved (by the revert)
        if ui.lastTryCommandOutput != '':
            resolveOpts = copy.deepcopy(opts)
            resolveOpts['all'] = True
            resolveOpts['mark'] = True
            if tryCommand(ui, "resolve -a -m", lambda:commands.resolve(ui, repo, **resolveOpts), repo = repo):
                return 1

        # Commit the merge
        if tryCommand(ui, "commit", lambda:repo.commit(text) is None):
            return 1

        # And push.
        # NOTE(review): tryCommand result > 1 is treated as push failure;
        # presumably 1 means "no changes to push" — confirm against
        # tryCommand's contract.
        if not opts['nopush']:
            pushOpts = copy.deepcopy(opts)
            if 'message' in pushOpts:
                del pushOpts['message']
            pushOpts['force'] = True
            # Collect the configured path aliases so we only push to the
            # remotes that actually exist in this repo's config.
            nameSet = set()
            for name, path in ui.configitems("paths"):
                nameSet.add(name)
            if tryCommand(ui, "push -f -b %s -b %s default" % (quoteBranch(branch), repo.topicProdBranch),
                          lambda:commands.push(ui, repo, branch=(branch,repo.topicProdBranch), **pushOpts),
                          repo=repo) > 1:
                return 1
            if "dev" in nameSet:
                if tryCommand(ui, "push -f -b %s -b %s dev" % (quoteBranch(branch), repo.topicProdBranch),
                              lambda:commands.push(ui, repo, branch=(branch,repo.topicProdBranch), dest="dev", **pushOpts),
                              repo=repo) > 1:
                    return 1
            if "stage" in nameSet:
                if tryCommand(ui, "push -f -b %s -b %s stage" % (quoteBranch(branch), repo.topicProdBranch),
                              lambda:commands.push(ui, repo, branch=(branch,repo.topicProdBranch), dest="stage", **pushOpts),
                              repo=repo) > 1:
                    return 1
            if "prod" in nameSet:
                if tryCommand(ui, "push -f -b %s -b %s prod" % (quoteBranch(branch), repo.topicProdBranch),
                              lambda:commands.push(ui, repo, branch=(branch,repo.topicProdBranch), dest="prod", **pushOpts),
                              repo=repo) > 1:
                    return 1

    ui.status("Done.\n")
recipes = open("source/cookbook/recipes.rst", "w") else: recipes = cStringIO.StringIO() recipes.write(header) url = "here: http://bitbucket.org/yt_analysis/cookbook/raw/tip/%s ." def cond_output(f, v): if not v: f.write(".. rubric:: Sample Output\n\n") return True repo = hg.repository(uii, "../cookbook/") commands.pull(uii, repo, "http://bitbucket.org/yt_analysis/cookbook/") ctx = repo["tip"] for file in ctx: if not file.startswith("recipes/"): continue print("Parsing %s" % (file)) lines = ctx[file].data().split("\n") fn = file[8:-3] title = fn.replace("_", " ").capitalize() title += "\n" + "-" * len(title) + "\n" * 2 title = ".. _cookbook-%s:\n\n%s" % (fn, title) if lines[0] != '"""': print(" Bad docstring: breaking.") print(file) di = lines[1:].index('"""') docstring = lines[1 : di + 1]