def pull(self, source=None, target=None): from mercurial import commands, hg, ui, error log.debug("Clone or update HG repository.") source = source or self.source target = target or self.target # Folders need to be manually created if not os.path.exists(target): os.makedirs(target) # Doesn't work with unicode type url = str(source) path = str(target) try: repo = hg.repository(ui.ui(), path) commands.pull(ui.ui(), repo, source=url) commands.update(ui.ui(), repo) log.debug("Mercurial: repository at " + url + " updated.") except error.RepoError, e: log.debug("Mercurial: " + str(e)) try: commands.clone(ui.ui(), url, path) log.debug("Mercurial: repository at " + url + " cloned.") except Exception, e: log.debug("Mercurial: " + str(e)) raise PullFromRepositoryException(unicode(e))
def test_branchmap_rebuildmeta(self):
    '''test rebuildmeta on a branchmapped clone'''
    repo_path = self.load_svndump('branchmap.svndump')
    # Map two svn branch names onto different hg branch names.
    branchmap = open(self.branchmap, 'w')
    branchmap.write("badname = dit\n")
    branchmap.write("feature = dah\n")
    branchmap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'branchmap', self.branchmap)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, branchmap=self.branchmap)
    # NOTE(review): originfo is never used below — left for parity with
    # similar tests; could be removed.
    originfo = self.repo.svnmeta().branches

    # clone & rebuild
    ui = self.ui()
    src, dest = test_util.hgclone(ui, self.wc_path,
                                  self.wc_path + '_clone',
                                  update=False)
    src = test_util.getlocalpeer(src)
    dest = test_util.getlocalpeer(dest)
    svncommands.rebuildmeta(ui, dest,
                            args=[test_util.fileurl(repo_path)])

    # just check the keys; assume the contents are unaffected by the branch
    # map and thus properly tested by other tests
    self.assertEquals(sorted(src.svnmeta().branches),
                      sorted(dest.svnmeta().branches))
def _get_repo(self, create, src_url=None, update_after_clone=False):
    """
    Function will check for mercurial repository in given path and return
    a localrepo object. If there is no repository in that path it will
    raise an exception unless ``create`` parameter is set to True - in
    that case repository would be created and returned.
    If ``src_url`` is given, would try to clone repository from the
    location at given clone_point. Additionally it'll make update to
    working copy accordingly to ``update_after_clone`` flag
    """
    try:
        if src_url:
            url = str(self._get_url(src_url))
            opts = {}
            if not update_after_clone:
                # Leave the working copy un-checked-out after the clone.
                opts.update({"noupdate": True})
            try:
                clone(self.baseui, url, self.path, **opts)
            except urllib2.URLError:
                raise Abort("Got HTTP 404 error")
            # Don't try to create if we've already cloned repo
            create = False
        return localrepository(self.baseui, self.path, create=create)
    except (Abort, RepoError), err:
        # Translate mercurial errors into the project's RepositoryError,
        # with a message that distinguishes create vs. open failures.
        if create:
            msg = "Cannot create repository at %s. Original error was %s" \
                  % (self.path, err)
        else:
            msg = "Not valid repository at %s. Original error was %s" \
                  % (self.path, err)
        raise RepositoryError(msg)
def test_updatemetahook(self):
    """Verify the changegroup.meta hook fires svn meta update on pull."""
    repo, repo_path = self._loadupdate('single_rev.svndump')
    state = repo.parents()
    self.add_svn_rev(repo_path, {'trunk/alpha': 'Changed'})
    commands.pull(self.repo.ui, self.repo)

    # Clone to a new repository and add a hook
    new_wc_path = "%s-2" % self.wc_path
    commands.clone(self.repo.ui, self.wc_path, new_wc_path)
    newrepo = hg.repository(test_util.testui(), new_wc_path)
    newrepo.ui.setconfig('hooks', 'changegroup.meta',
                         'python:hgsubversion.hooks.updatemeta.hook')

    # Commit a rev that should trigger svn meta update
    self.add_svn_rev(repo_path, {'trunk/alpha': 'Changed Again'})
    commands.pull(self.repo.ui, self.repo)

    self.called = False
    import hgsubversion.svncommands
    oldupdatemeta = hgsubversion.svncommands.updatemeta

    # Stub that only records the call (args default fixed: the original
    # used a mutable default list).
    def _updatemeta(ui, repo, args=None):
        self.called = True

    hgsubversion.svncommands.updatemeta = _updatemeta
    try:
        # Pull and make sure our updatemeta function gets called
        commands.pull(newrepo.ui, newrepo)
    finally:
        # Restore the real implementation even if the pull raises, so a
        # failure here cannot poison other tests (original restored it
        # unconditionally outside any try/finally).
        hgsubversion.svncommands.updatemeta = oldupdatemeta
    self.assertTrue(self.called)
def nclone(ui, source, dest=None, **opts):
    '''make a copy of an existing repository and all nested repositories

    Create a copy of an existing repository in a new directory. Look at
    the help of clone command for more informations.'''
    origsource = ui.expandpath(source)
    remotesource, remotebranch = hg.parseurl(origsource, opts.get('branch'))
    if hasattr(hg, 'peer'):
        # Newer mercurial: open a peer; prefer the local repo object when
        # the peer wraps one.
        remoterepo = hg.peer(ui, opts, remotesource)
        localrepo = remoterepo.local()
        if localrepo:
            remoterepo = localrepo
    else:
        # Older mercurial without hg.peer.
        remoterepo = hg.repository(hg.remoteui(ui, opts), remotesource)
    if dest is None:
        dest = hg.defaultdest(source)
        ui.status(_("destination directory: %s\n") % dest)
    # Clone the top-level repo plus every nested repository it declares.
    for npath in remoterepo.nested:
        if npath == '.':
            npath = ''
        # URL-style join for remote schemes, os.path.join for local paths.
        u = util.url(source)
        if u.scheme:
            nsource = '%s/%s' % (source, npath)
        else:
            nsource = os.path.join(source, npath)
        ndest = os.path.join(dest, npath)
        ui.status('[%s]\n' % os.path.normpath(
            os.path.join(os.path.basename(dest),
                         ndest[len(dest) + 1:])))
        commands.clone(ui, nsource, dest=ndest, **opts)
        ui.status('\n')
def get_yaml_from_mercurial(vcs_address, vcs_subdir):
    """Clone a mercurial repo into a temp dir and return its index.yaml.

    The returned mapping also carries a '.' entry summarizing the three
    most recent changesets.  Raises RemotePackageNotFoundError when the
    remote cannot be fetched and IndexNotFoundError when index.yaml is
    missing.  The temporary clone is always removed.
    """
    from mercurial import ui, commands
    from urllib2 import HTTPError
    import hglib
    vtemp = mkdtemp(prefix='multipkg-vcs-')
    try:
        commands.clone(ui.ui(), str(vcs_address), dest=vtemp)
        client = hglib.open(vtemp)
        # get index.yaml — use a context manager so the handle is closed
        # promptly (the original leaked it via file(...).read()).
        path_to_yaml = path_join(vtemp, vcs_subdir, 'index.yaml')
        with open(path_to_yaml) as yaml_file:
            yaml = yaml_load(yaml_file.read())
        recent_changes = []
        # Summarize the last three changesets (tip, tip^, tip^^).
        for entry in client.log('tip:tip^^'):
            num, rev, none, branch, author, msg, date = entry
            date = date.strftime('%Y-%m-%d %H:%M:%S')
            recent_changes.append("commit %s | Author: %s | Date: %s \n%s\n"
                                  % (rev, author, date, msg))
        yaml['.'] = dict(recent_changes="\n".join(recent_changes))
        return yaml
    except HTTPError:
        raise RemotePackageNotFoundError(vcs_address)
    except IOError as e:
        # Translate only a missing index.yaml; re-raise anything else.
        # (The original also had a redundant bare `except: raise`, removed.)
        if e.errno == errno.ENOENT and e.filename.find('.yaml') > -1:
            raise IndexNotFoundError('index.yaml not found in your repository')
        raise
    finally:
        if isdir(vtemp):
            rmtree(vtemp)
def main(argv): # Find destination directory based on current file location destdir = os.path.abspath(os.path.join( os.path.dirname(__file__), '..', '..')) # Read the configuration file for the shared repository to get the pull path repo = hg.repository( ui.ui(), os.path.join(os.path.dirname(__file__), '..')) sharedpath = repo.ui.config('paths', 'default', None) if sharedpath is None: raise Exception('no default path in the shared directory!') unstable = sharedpath.endswith('-unstable') path = os.path.dirname(sharedpath) print 'using %s as remote repository path' % path for module in reduce(lambda x, y: x + y.split(','), argv, []): if module.endswith('-unstable'): module = module[:-len('-unstable')] if not os.path.exists(os.path.join(destdir, module)): # Attempt to clone the repository to the destination if module == "GUIRipper-Plugin-JFC" or module == "GUIRipper-Core" or module == "GUITARModel-Plugin-JFC" or module == "GUITARModel-Core" or module == "GUIReplayer-Plugin-JFC" or module == "GUIReplayer-Core": call("git clone git://github.com/cmsc435sikuli/" + module + ".git " + destdir + "/" + module, shell=True) else: url = '%s/%s%s' % (path, module, '-unstable' if unstable else '') print 'checking out %s to %s' % (url, destdir) commands.clone(ui.ui(), url, os.path.join(destdir, module)) else: # Repository already exists, skip print '%s already exists (skipping)' % module
def clone_repo(ui, clone_url, target_dir, fullName): """ Clones the specified repository. Returns True if successful. If the clone fails, prints an error message and returns False, without aborting the entire process. """ # If the filesystem does not use Unicode (from Python’s perspective), # convert target_dir to plain ASCII. if not sys.getfilesystemencoding().upper().startswith('UTF'): target_dir = target_dir.encode('ascii', 'xmlcharrefreplace') # Mercurial API throws an exception if target_dir is passed (below) # as unicode object, so it's required to be converted to string. else: target_dir = str(target_dir) # If directory already exists... if os.path.isdir(target_dir): # ... and is not empty... if os.listdir(target_dir): # ... just skip it, without aborting the entire process. ui.warn(encode_stderr(('Warning: directory %s already exists ' + 'and is not empty, it will be skipped.\n') % target_dir)) return False # Otherwise create required catalogs tree. else: os.makedirs(target_dir) ui.status(encode_stdout('\nCloning "%s" repository\n' % fullName)) commands.clone(ui, clone_url, dest=target_dir) return True
def internal_push_over_svnserve(self, subdir='', commit=True):
    """Start a local svnserve, clone from it, commit locally and push back.

    When *commit* is False, stop right after creating the local commit;
    some tests reuse this method as an extended setup.  The svnserve
    process is always killed on exit.
    """
    test_util.load_svndump_fixture(self.repo_path, 'simple_branch.svndump')
    # Allow anonymous writes so the push needs no auth.  Use a context
    # manager so the config file handle is closed deterministically
    # (the original leaked it via open(...).write(...)).
    conf_path = os.path.join(self.repo_path, 'conf', 'svnserve.conf')
    with open(conf_path, 'w') as conf:
        conf.write('[general]\nanon-access=write\n[sasl]\n')

    self.port = random.randint(socket.IPPORT_USERRESERVED, 65535)
    self.host = 'localhost'
    args = ['svnserve', '--daemon', '--foreground',
            '--listen-port=%d' % self.port,
            '--listen-host=%s' % self.host,
            '--root=%s' % self.repo_path]
    svnserve = subprocess.Popen(args, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
    self.svnserve_pid = svnserve.pid
    try:
        time.sleep(2)  # give svnserve time to start listening
        import shutil
        shutil.rmtree(self.wc_path)
        commands.clone(self.ui(),
                       'svn://%s:%d/%s' % (self.host, self.port, subdir),
                       self.wc_path, noupdate=True)

        repo = self.repo
        old_tip = repo['tip'].node()
        expected_parent = repo['default'].node()

        def file_callback(repo, memctx, path):
            # Only 'adding_file' may be materialized by the memctx.
            if path == 'adding_file':
                return context.memfilectx(path=path,
                                          data='foo',
                                          islink=False,
                                          isexec=False,
                                          copied=False)
            raise IOError(errno.EINVAL, 'Invalid operation: ' + path)

        ctx = context.memctx(repo,
                             parents=(repo['default'].node(), node.nullid),
                             text='automated test',
                             files=['adding_file'],
                             filectxfn=file_callback,
                             user='******',
                             date='2008-10-07 20:59:48 -0500',
                             extra={'branch': 'default',})
        new_hash = repo.commitctx(ctx)
        if not commit:
            return  # some tests use this test as an extended setup.
        hg.update(repo, repo['tip'].node())
        oldauthor = repo['tip'].user()
        commands.push(repo.ui, repo)
        tip = self.repo['tip']
        self.assertNotEqual(oldauthor, tip.user())
        self.assertNotEqual(tip.node(), old_tip)
        self.assertEqual(tip.parents()[0].node(), expected_parent)
        self.assertEqual(tip['adding_file'].data(), 'foo')
        self.assertEqual(tip.branch(), 'default')
        # unintended behaviour:
        self.assertNotEqual('an_author', tip.user())
        self.assertEqual('(no author)', tip.user().rsplit('@', 1)[0])
    finally:
        # TODO: use svnserve.kill() in Python >2.5
        test_util.kill_process(svnserve)
def upload( self, trans, **kwd ): message = escape( kwd.get( 'message', '' ) ) status = kwd.get( 'status', 'done' ) commit_message = escape( kwd.get( 'commit_message', 'Uploaded' ) ) category_ids = util.listify( kwd.get( 'category_id', '' ) ) categories = suc.get_categories( trans.app ) repository_id = kwd.get( 'repository_id', '' ) repository = suc.get_repository_in_tool_shed( trans.app, repository_id ) repo_dir = repository.repo_path( trans.app ) repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False ) uncompress_file = util.string_as_bool( kwd.get( 'uncompress_file', 'true' ) ) remove_repo_files_not_in_tar = util.string_as_bool( kwd.get( 'remove_repo_files_not_in_tar', 'true' ) ) uploaded_file = None upload_point = commit_util.get_upload_point( repository, **kwd ) tip = repository.tip( trans.app ) file_data = kwd.get( 'file_data', '' ) url = kwd.get( 'url', '' ) # Part of the upload process is sending email notification to those that have registered to # receive them. One scenario occurs when the first change set is produced for the repository. # See the suc.handle_email_alerts() method for the definition of the scenarios. new_repo_alert = repository.is_new( trans.app ) uploaded_directory = None if kwd.get( 'upload_button', False ): if file_data == '' and url == '': message = 'No files were entered on the upload form.' status = 'error' uploaded_file = None elif url and url.startswith( 'hg' ): # Use mercurial clone to fetch repository, contents will then be copied over. 
uploaded_directory = tempfile.mkdtemp() repo_url = 'http%s' % url[ len( 'hg' ): ] repo_url = repo_url.encode( 'ascii', 'replace' ) try: commands.clone( hg_util.get_configured_ui(), repo_url, uploaded_directory ) except Exception, e: message = 'Error uploading via mercurial clone: %s' % basic_util.to_html_string( str( e ) ) status = 'error' basic_util.remove_dir( uploaded_directory ) uploaded_directory = None elif url: valid_url = True try: stream = urllib.urlopen( url ) except Exception, e: valid_url = False message = 'Error uploading file via http: %s' % str( e ) status = 'error' uploaded_file = None if valid_url: fd, uploaded_file_name = tempfile.mkstemp() uploaded_file = open( uploaded_file_name, 'wb' ) while 1: chunk = stream.read( util.CHUNK_SIZE ) if not chunk: break uploaded_file.write( chunk ) uploaded_file.flush() uploaded_file_filename = url.split( '/' )[ -1 ] isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
def load_fixture_and_fetch(fixture_name, repo_path, wc_path, stupid=False,
                           subdir='', noupdate=True, layout='auto'):
    """Load an svn dump fixture, clone it into *wc_path*, open the result."""
    load_svndump_fixture(repo_path, fixture_name)
    # Optionally convert only a subdirectory of the dumped repository.
    source = repo_path + '/' + subdir if subdir else repo_path
    clone_ui = testui(stupid=stupid, layout=layout)
    commands.clone(clone_ui, fileurl(source), wc_path, noupdate=noupdate)
    return hg.repository(testui(), wc_path)
def clone(self):
    """If we don't have a copy of the open-ihm repository on disk
    clone one now.
    """
    try:
        # Loosen permissions first so the clone can write its files.
        self.chmod()
        commands.clone(self.ui, self.url, dest=REPO_DIR, insecure=True)
    except Exception:
        # Route any failure through the common failure handler.
        self.fail()
    return
def test_diff_base_against_clone(self):
    """Test that the right error is raised on trying to do a diff
    across a different divergant clone"""
    ui = mock_ui()
    orig = os.path.join(settings.REPOSITORY_BASE, "orig")
    clone = os.path.join(settings.REPOSITORY_BASE, "clone")
    hgcommands.init(ui, orig)
    hgorig = repository(ui, orig)
    # Seed the base repository with one DTD file and commit it.
    (
        open(hgorig.pathto("file.dtd"), "w").write(
            """
<!ENTITY old "content we will delete">
<!ENTITY mod "this has stuff to keep and delete">
"""
        )
    )
    hgcommands.addremove(ui, hgorig)
    hgcommands.commit(ui, hgorig, user="******", message="initial commit")
    assert len(hgorig) == 1  # 1 commit

    # set up a second repo called 'clone'
    hgcommands.clone(ui, orig, clone)
    hgclone = repository(ui, clone)

    # new commit on base
    (
        open(hgorig.pathto("file.dtd"), "w").write(
            """
<!ENTITY mod "this has stuff to keep and add">
<!ENTITY new "this has stuff that is new">
"""
        )
    )
    hgcommands.commit(ui, hgorig, user="******",
                      message="second commit on base")
    assert len(hgorig) == 2  # 2 commits
    rev_from = hgorig[1].hex()

    # different commit on clone — histories now diverge.
    (
        open(hgclone.pathto("file.dtd"), "w").write(
            """
<!ENTITY mod "this has stuff to keep and change">
<!ENTITY new_in_clone "this has stuff that is different from base">
"""
        )
    )
    hgcommands.commit(ui, hgclone, user="******",
                      message="a different commit on clone")
    rev_to = hgclone[1].hex()

    Repository.objects.create(name="orig",
                              url="http://localhost:8001/orig/")
    Repository.objects.create(name="clone",
                              url="http://localhost:8001/clone/")

    url = reverse("pushes.views.diff")
    # right now, we can't diff between repos, this might change!
    self.assertRaises(RepoError, self.client.get, url,
                      {"repo": "clone",
                       "from": rev_from[:12],
                       "to": rev_to[:12]})
def test_file_map_exclude(self, stupid=False):
    # An 'exclude' filemap entry must drop that file from the conversion.
    test_util.load_svndump_fixture(self.repo_path,
                                   'replace_trunk_with_branch.svndump')
    filemap = open(self.filemap, 'w')
    filemap.write("exclude alpha\n")
    filemap.close()
    ui = self.ui(stupid)
    ui.setconfig('hgsubversion', 'filemap', self.filemap)
    commands.clone(ui, test_util.fileurl(self.repo_path),
                   self.wc_path, filemap=self.filemap)
    # These hashes pin the exact converted history without 'alpha'.
    self.assertEqual(node.hex(self.repo[0].node()),
                     '2c48f3525926ab6c8b8424bcf5eb34b149b61841')
    self.assertEqual(node.hex(self.repo['default'].node()),
                     'b37a3c0297b71f989064d9b545b5a478bbed7cc1')
def test_file_map(self, stupid=False):
    # An 'include' filemap entry restricts the conversion to that file.
    test_util.load_svndump_fixture(self.repo_path,
                                   'replace_trunk_with_branch.svndump')
    filemap = open(self.filemap, 'w')
    filemap.write("include alpha\n")
    filemap.close()
    ui = self.ui(stupid)
    ui.setconfig('hgsubversion', 'filemap', self.filemap)
    commands.clone(ui, test_util.fileurl(self.repo_path),
                   self.wc_path, filemap=self.filemap)
    # These hashes pin the exact converted history containing only 'alpha'.
    self.assertEqual(node.hex(self.repo[0].node()),
                     '88e2c7492d83e4bf30fbb2dcbf6aa24d60ac688d')
    self.assertEqual(node.hex(self.repo['default'].node()),
                     'e524296152246b3837fe9503c83b727075835155')
def test_empty_log_message(self):
    """An empty svn log converts to '' unless defaultmessage is set."""
    repo, repo_path = self.load_and_fetch('empty-log-message.svndump')
    self.assertEqual(repo['tip'].description(), '')

    # Re-clone with a configured default commit message.
    test_util.rmtree(self.wc_path)
    myui = self.ui()
    myui.setconfig('hgsubversion', 'defaultmessage', 'blyf')
    commands.clone(myui, test_util.fileurl(repo_path), self.wc_path)
    self.assertEqual(self.repo['tip'].description(), 'blyf')
def bb_create(ui, reponame, **opts):
    """Create *reponame* on bitbucket, then clone the new repository."""
    payload = dict(name=reponame)
    for key in ('description', 'language', 'website'):
        payload[key] = opts.get(key)
    _bb_apicall(ui, 'repositories', payload)
    # if this completes without exception, assume the request was successful,
    # and clone the new repo
    ui.write('repository created, cloning...\n')
    commands.clone(ui, 'bb://' + reponame)
def test_branchmap_empty_commit(self):
    '''test mapping an empty commit on a renamed branch'''
    repo_path = self.load_svndump('propset-branch.svndump')
    branchmap = open(self.branchmap, 'w')
    branchmap.write("the-branch = bob\n")
    branchmap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'branchmap', self.branchmap)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, branchmap=self.branchmap)
    # The renamed branch must appear alongside default after conversion.
    branches = set(self.repo[i].branch() for i in self.repo)
    self.assertEquals(sorted(branches), ['bob', 'default'])
def test_branchmap_tagging(self):
    '''test tagging a renamed branch, which used to raise an exception'''
    repo_path = self.load_svndump('commit-to-tag.svndump')
    branchmap = open(self.branchmap, 'w')
    branchmap.write("magic = art\n")
    branchmap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'branchmap', self.branchmap)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, branchmap=self.branchmap)
    # Regression check: conversion completes and both branches exist.
    branches = set(self.repo[i].branch() for i in self.repo)
    self.assertEquals(sorted(branches), ['art', 'closeme'])
def handlePushes(repo_id, submits, do_update=True): if not submits: return repo = Repository.objects.get(id=repo_id) revisions = reduce(lambda r,l: r+l, [p.changesets for p in submits], []) ui = _ui() repopath = os.path.join(settings.REPOSITORY_BASE, repo.name) configpath = os.path.join(repopath, '.hg', 'hgrc') if not os.path.isfile(configpath): if not os.path.isdir(os.path.dirname(repopath)): os.makedirs(os.path.dirname(repopath)) clone(ui, str(repo.url), str(repopath), pull=False, uncompressed=False, rev=[], noupdate=False) cfg = open(configpath, 'a') cfg.write('default-push = ssh%s\n' % str(repo.url)[4:]) cfg.close() ui.readconfig(configpath) hgrepo = repository(ui, repopath) else: ui.readconfig(configpath) hgrepo = repository(ui, repopath) cs = submits[-1].changesets[-1] try: hgrepo.changectx(cs) except RepoError: pull(ui, hgrepo, source = str(repo.url), force=False, update=False, rev=[]) if do_update: update(ui, hgrepo) for data in submits: changesets = [] for revision in data.changesets: try: cs = getChangeset(repo, hgrepo, revision) transaction.commit() changesets.append(cs) except Exception, e: transaction.rollback() raise print repo.name, e p = Push.objects.create(repository = repo, push_id = data.id, user = data.user, push_date = datetime.utcfromtimestamp(data.date)) p.changesets = changesets p.save() transaction.commit()
def test_author_map_closing_author(self):
    # Authors seen only on branch-closing revisions must also be mapped.
    repo_path = self.load_svndump('replace_trunk_with_branch.svndump')
    authormap = open(self.authors, 'w')
    authormap.write("evil=Testy <test@test>")
    authormap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'authormap', self.authors)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, authors=self.authors)
    # Unmapped author keeps the svn-UUID suffix; mapped one is rewritten.
    self.assertEqual(self.repo[0].user(),
                     'Augie@5b65bade-98f3-4993-a01f-b7a6710da339')
    self.assertEqual(self.repo['tip'].user(), 'Testy <test@test>')
def test_tagren_changed(self):
    # Tagmap entries: plain rename, drop (empty mapping), and rename of
    # a tag edited after creation.
    repo_path = self.load_svndump('commit-to-tag.svndump')
    tagmap = open(self.tagmap, 'w')
    tagmap.write("edit-at-create = edit-past\n")
    tagmap.write("also-edit = \n")
    tagmap.write("will-edit = edit-future\n")
    tagmap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'tagmap', self.tagmap)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, tagmap=self.tagmap)
    tags = self.repo.tags()
def _loadwithfilemap(self, svndump, filemapcontent, failonmissing=True):
    """Clone *svndump* with *filemapcontent* applied; return the repo."""
    repo_path = self.load_svndump(svndump)
    # Write the filemap the clone will consume.
    fmap = open(self.filemap, 'w')
    fmap.write(filemapcontent)
    fmap.close()
    myui = self.ui()
    for key, value in (('filemap', self.filemap),
                       ('failoninvalidreplayfile', 'true'),
                       ('failonmissing', failonmissing)):
        myui.setconfig('hgsubversion', key, value)
    commands.clone(myui, test_util.fileurl(repo_path), self.wc_path,
                   filemap=self.filemap)
    return self.repo
def test_branchmap(self):
    repo_path = self.load_svndump('branchmap.svndump')
    branchmap = open(self.branchmap, 'w')
    # Trailing '# stuffy' checks that inline comments are ignored.
    branchmap.write("badname = good-name # stuffy\n")
    branchmap.write("feature = default\n")
    branchmap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'branchmap', self.branchmap)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, branchmap=self.branchmap)
    branches = set(self.repo[i].branch() for i in self.repo)
    self.assert_('badname' not in branches)
    self.assert_('good-name' in branches)
    self.assertEquals(self.repo[2].branch(), 'default')
def test_author_map(self):
    repo_path = self.load_svndump('replace_trunk_with_branch.svndump')
    authormap = open(self.authors, 'w')
    # Inline '# stuffy' comment should be stripped from the mapping.
    authormap.write('Augie=Augie Fackler <*****@*****.**> # stuffy\n')
    authormap.write("Augie Fackler <*****@*****.**>\n")
    authormap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'authormap', self.authors)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, authors=self.authors)
    self.assertEqual(self.repo[0].user(),
                     'Augie Fackler <*****@*****.**>')
    # Authors absent from the map keep the svn-UUID-suffixed identity.
    self.assertEqual(self.repo['tip'].user(),
                     'evil@5b65bade-98f3-4993-a01f-b7a6710da339')
def hg_clone(repository_url, destdir, revision, verbose):
    # Clone via the mercurial API when available, otherwise shell out
    # to the `hg` command line.
    print 'clone repo revision %s to %s' % (revision, destdir,)
    if has_mercurial:
        u = ui.ui()
        repo = hg.repository(u, repository_url)
        # rev=None clones everything; a list pins the clone at `revision`.
        rev = [revision,] if revision else None
        # NOTE(review): a repository object is passed as the clone source;
        # commands.clone usually takes a source path/URL — confirm this
        # works with the targeted mercurial version.
        commands.clone(u, repo, destdir, rev=rev, verbose=verbose)
    else:
        if revision:
            cmd = 'hg clone -r%s %s %s' % (revision, repository_url, destdir)
        else:
            cmd = 'hg clone %s %s' % (repository_url, destdir)
        print cmd
        os.system(cmd)
    print 'cloned repo to %s' % (destdir,)
def test_tagmap(self):
    repo_path = self.load_svndump('basic_tag_tests.svndump')
    tagmap = open(self.tagmap, 'w')
    # Rename tag_r3 (text after '#' must be ignored) and drop
    # copied_tag via an empty mapping.
    tagmap.write("tag_r3 = 3.x # stuffy\n")
    tagmap.write("copied_tag = \n")
    tagmap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'tagmap', self.tagmap)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, tagmap=self.tagmap)
    tags = self.repo.tags()
    assert 'tag_r3' not in tags
    assert '3.x' in tags
    assert 'copied_tag' not in tags
def test_branchmap_verify(self):
    '''test verify on a branchmapped clone'''
    repo_path = self.load_svndump('branchmap.svndump')
    branchmap = open(self.branchmap, 'w')
    branchmap.write("badname = dit\n")
    branchmap.write("feature = dah\n")
    branchmap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'branchmap', self.branchmap)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, branchmap=self.branchmap)
    repo = self.repo

    # Every revision of the renamed-branch clone must pass verify
    # (verify.verify returns 0 on success).
    for r in repo:
        self.assertEquals(verify.verify(ui, repo, rev=r), 0)
def cache_cmd(ui, source=None, **opts):
    # Maintain a local cache of remote repositories under CACHE:
    # with --update, refresh existing entries; otherwise cache SOURCE.
    if source is None and not opts.get('update'):
        raise hg.util.Abort(_("either SOURCE or --update is required"))
    print source
    if opts.get('update'):
        # Refresh every cached repo, or just the one matching SOURCE.
        for repo_d in os.listdir(CACHE):
            if source is None or repo_d == url_to_filename(source):
                ui.status('updating cache {}\n'.format(repo_d))
                cache_peer = hg.peer(ui, {}, os.path.join(CACHE, repo_d))
                commands.pull(cache_peer.ui, cache_peer.local(),
                              noupdate=True)
    else:
        # Only remote repositories make sense in the cache.
        if hg.islocal(source):
            raise hg.util.Abort(_("not caching local repo {}".format(source)))
        cache_d = os.path.join(CACHE, url_to_filename(source))
        ui.status(_('caching {} to {}\n'.format(source, cache_d)))
        commands.clone(ui, source, cache_d, noupdate=True)
def clone_repository(repository_clone_url, repository_file_dir, ctx_rev): """ Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository. """ try: commands.clone(get_configured_ui(), str(repository_clone_url), dest=str(repository_file_dir), pull=True, noupdate=False, rev=listify(str(ctx_rev))) return True, None except Exception, e: error_message = 'Error cloning repository: %s' % str(e) log.debug(error_message) return False, error_message
def _hg_repository_sync(name, url, submits, do_update=True):
    """Ensure the local mirror of *url* exists and contains the latest
    submitted changeset; return the opened hg repository.

    Clones on first contact (recording an ssh default-push path), pulls
    when the newest submitted changeset is missing locally.
    """
    ui_ = ui()
    repopath = os.path.join(settings.REPOSITORY_BASE, name)
    configpath = os.path.join(repopath, '.hg', 'hgrc')
    if not os.path.isfile(configpath):
        # First sync: clone and append the ssh default-push path.
        if not os.path.isdir(os.path.dirname(repopath)):
            os.makedirs(os.path.dirname(repopath))
        clone(ui_, str(url), str(repopath),
              pull=False, uncompressed=False, rev=[],
              noupdate=False)
        cfg = open(configpath, 'a')
        cfg.write('default-push = ssh%s\n' % str(url)[4:])
        cfg.close()
    # Shared tail hoisted out of the duplicated if/else branches.
    ui_.readconfig(configpath)
    hgrepo = repository(ui_, repopath)
    cs = submits[-1].changesets[-1]
    try:
        hgrepo.changectx(cs)
    except RepoError:
        pull(ui_, hgrepo, source=str(url),
             force=False, update=False, rev=[])
        if do_update:
            # Make sure that we're not triggering workers in post 2.6
            # hg. That's not stable, at least as we do it.
            # Monkey patch time
            try:
                from mercurial import worker
                if hasattr(worker, '_startupcost'):
                    # use same value as hg for non-posix
                    worker._startupcost = 1e30
            except ImportError:
                # no worker, no problem
                pass
            update(ui_, hgrepo)
    return hgrepo
def test_author_map(self):
    repo_path = self.load_svndump('replace_trunk_with_branch.svndump')
    authormap = open(self.authors, 'w')
    # '# stuffy' exercises inline-comment stripping in the author map.
    authormap.write('Augie=Augie Fackler <*****@*****.**> # stuffy\n')
    authormap.write("Augie Fackler <*****@*****.**>\n")
    authormap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'authormap', self.authors)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, authors=self.authors)
    self.assertEqual(self.repo[0].user(),
                     'Augie Fackler <*****@*****.**>')
    # Unmapped authors keep the svn-UUID-suffixed identity.
    self.assertEqual(
        revsymbol(self.repo, 'tip').user(),
        'evil@5b65bade-98f3-4993-a01f-b7a6710da339')
def _checkoutUpstreamRevision(self, revision):
    """
    Initial checkout (hg clone)
    """
    from os import mkdir, rename, rmdir
    from os.path import exists, join

    self._getUI()

    # We have to clone the entire repository to be able to pull from it
    # later. So a partial checkout is a full clone followed by an update
    # directly to the desired revision.

    # If the basedir does not exist, create it
    if not exists(self.repository.basedir):
        mkdir(self.repository.basedir)

    # clone it only if .hg does not exist
    if not exists(join(self.repository.basedir, ".hg")):
        # Hg won't check out into an existing directory
        checkoutdir = join(self.repository.basedir, ".hgtmp")
        opts = self._defaultOpts('clone')
        opts['noupdate'] = True
        commands.clone(self._ui, self.repository.repository,
                       checkoutdir, **opts)
        # Move the metadata into place, then drop the temp directory.
        rename(join(checkoutdir, ".hg"),
               join(self.repository.basedir, ".hg"))
        rmdir(checkoutdir)
    else:
        # Does hgrc exist? If not, we write one
        hgrc = join(self.repository.basedir, ".hg", "hgrc")
        if not exists(hgrc):
            hgrc = file(hgrc, "w")
            hgrc.write("[paths]\ndefault = %s\ndefault-push = %s\n"
                       % (self.repository.repository,
                          self.repository.repository))
            hgrc.close()

    repo = self._getRepo()
    node = self._getNode(repo, revision)

    self.log.info('Extracting revision %r from %r into %r',
                  revision, self.repository.repository,
                  self.repository.basedir)
    repo.update(node)

    return self._changesetForRevision(repo, revision)
def test_branchmap_regex_and_glob(self):
    # Mix 're' syntax (with a capture-group substitution) and glob
    # syntax within one branchmap file.
    repo_path = self.load_svndump('branchmap.svndump')
    branchmap = open(self.branchmap, 'w')
    branchmap.write("syntax:re\n")
    branchmap.write("bad(.*) = good-\\1 # stuffy\n")
    branchmap.write("glob:feat* = default\n")
    branchmap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'branchmap', self.branchmap)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, branchmap=self.branchmap)
    branches = set(self.repo[i].branch() for i in self.repo)
    self.assert_('badname' not in branches)
    self.assert_('good-name' in branches)
    self.assertEquals(self.repo[2].branch(), 'default')
def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
    """
    Clone the repository up to the specified changeset_revision.

    No subsequent revisions will be present in the cloned repository.
    Returns ( True, None ) on success, ( False, error_message ) on failure.
    """
    try:
        # pull=True plus rev pins the clone at ctx_rev and nothing later.
        commands.clone( get_configured_ui(),
                        str( repository_clone_url ),
                        dest=str( repository_file_dir ),
                        pull=True,
                        noupdate=False,
                        rev=listify( str( ctx_rev ) ) )
        return True, None
    except Exception, e:
        error_message = 'Error cloning repository: %s' % str( e )
        log.debug( error_message )
        return False, error_message
def install(self):
    """
    Does the actual installation of this part.

    Be aware, that if the part was previously installed, it will get
    removed.  Returns the destination path.
    """
    self.log.info("Cloning repository %s to %s" % (
        self.source, self.destination
    ))
    # Wipe any previous install before cloning anew.
    shutil.rmtree(self.destination, ignore_errors=True)
    commands.clone(ui.ui(), get_repository(self.source), self.destination)
    if self.rev is not None:
        # Log only when an update is actually performed; the original
        # logged "Updating to revision None" before checking self.rev.
        self.log.info("Updating to revision %s" % self.rev)
        commands.update(ui.ui(), get_repository(self.destination),
                        rev=self.rev)
    if self.as_egg:
        self._install_as_egg()
    return self.destination
def test_branchmap_no_replacement(self):
    '''test that empty mappings are accepted

    Empty mappings are lines like 'this ='. We check that such
    branches are not converted.
    '''
    repo_path = self.load_svndump('branchmap.svndump')
    branchmap = open(self.branchmap, 'w')
    branchmap.write("badname =\n")
    branchmap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'branchmap', self.branchmap)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, branchmap=self.branchmap)
    # 'badname' was skipped, so only the untouched branches remain.
    branches = set(self.repo[i].branch() for i in self.repo)
    self.assertEquals(sorted(branches), ['default', 'feature'])
def test_author_map_no_author(self):
    # Without a map, authorless svn revs become '(no author)@UUID'.
    repo, repo_path = self.load_and_fetch('no-author.svndump')
    users = set(self.repo[r].user() for r in self.repo)
    expected_users = ['(no author)@%s' % self.repo.svnmeta().uuid]
    self.assertEqual(sorted(users), expected_users)
    test_util.rmtree(self.wc_path)

    # With an explicit '(no author)' mapping, that identity is used.
    authormap = open(self.authors, 'w')
    authormap.write("(no author)=Testy <*****@*****.**>")
    authormap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'authormap', self.authors)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, authors=self.authors)
    users = set(self.repo[r].user() for r in self.repo)
    expected_users = ['Testy <*****@*****.**>']
    self.assertEqual(sorted(users), expected_users)
def test_truncated_history(self):
    # Test repository does not follow the usual layout
    repo_path = self.load_svndump('truncatedhistory.svndump')
    svn_url = test_util.fileurl(repo_path + '/project2')
    commands.clone(self.ui(), svn_url, self.wc_path, noupdate=True)
    repo = hg.repository(self.ui(), self.wc_path)

    # We are converting /project2/trunk coming from:
    #
    # Changed paths:
    #     D /project1
    #     A /project2/trunk (from /project1:2)
    #
    # Here a full fetch should be performed since we are starting
    # the conversion on an already filled branch.
    tip = repo['tip']
    files = tip.manifest().keys()
    files.sort()
    self.assertEqual(files, ['a', 'b'])
    self.assertEqual(repo['tip']['a'].data(), 'a\n')
def bb_create(ui, reponame, **opts):
    """Create repository on bitbucket"""
    # Guard against a missing --language: opts.get() may return None,
    # and None.lower() would raise AttributeError.
    language = (opts.get('language') or '').lower()
    data = {
        'name': reponame,
        'description': opts.get('description'),
        'language': language,
        'website': opts.get('website'),
        'scm': 'hg',
    }
    if opts.get('private'):
        data['is_private'] = True
    _bb_apicall(ui, 'repositories', data)
    # if this completes without exception, assume the request was successful,
    # and clone the new repo
    if opts['noclone']:
        ui.write('repository created\n')
    else:
        ui.write('repository created, cloning...\n')
        commands.clone(ui, 'bb://' + reponame)
def test_branchmap_combine(self):
    '''test combining two branches, but retaining heads'''
    repo_path = self.load_svndump('branchmap.svndump')
    branchmap = open(self.branchmap, 'w')
    branchmap.write("badname = default\n")
    branchmap.write("feature = default\n")
    branchmap.close()
    ui = self.ui()
    ui.setconfig('hgsubversion', 'branchmap', self.branchmap)
    commands.clone(ui, test_util.fileurl(repo_path),
                   self.wc_path, branchmap=self.branchmap)
    # Both svn branches collapse onto 'default' but keep separate heads.
    branches = set(self.repo[i].branch() for i in self.repo)
    self.assertEquals(sorted(branches), ['default'])
    self.assertEquals(len(self.repo.heads()), 2)
    self.assertEquals(len(self.repo.branchheads('default')), 2)

    # test that the mapping does not affect branch info
    branches = self.repo.svnmeta().branches
    self.assertEquals(sorted(branches.keys()),
                      [None, 'badname', 'feature'])
def test_path_quoting(self):
    """Clone the same non-ASCII subdirectory via a raw and a percent-quoted
    URL and verify both conversions produce identical repositories."""
    repo_path = self.load_svndump('non_ascii_path_1.svndump')
    subdir = '/b\xC3\xB8b'  # UTF-8 bytes for '/bøb'
    quoted_subdir = urllib.quote(subdir)
    repo_url = test_util.fileurl(repo_path)
    wc_path = self.wc_path
    wc2_path = wc_path + '-2'
    ui = self.ui()
    commands.clone(ui, repo_url + subdir, wc_path)
    commands.clone(ui, repo_url + quoted_subdir, wc2_path)
    repo = hg.repository(ui, wc_path)
    repo2 = hg.repository(ui, wc2_path)
    # Consistency fix: use revsymbol() for BOTH repositories.  The original
    # mixed revsymbol(repo, 'tip') with a bare repo2['tip'] lookup; the
    # compat helper exists because plain ['tip'] indexing is not portable
    # across Mercurial versions.
    self.assertEqual(
        revsymbol(repo, 'tip').extra()['convert_revision'],
        revsymbol(repo2, 'tip').extra()['convert_revision'])
    self.assertEqual(test_util.repolen(repo), test_util.repolen(repo2))
    for r in repo:
        self.assertEqual(repo[r].hex(), repo2[r].hex())
def create_repo(self, dest, ui):
    """Build a pushlog-enabled source repository under ``dest`` and return
    its path, after pushing five small commits into it."""
    vct = 'http://hg.mozilla.org/hgcustom/version-control-tools'
    commands.clone(ui, vct, dest=os.path.join(dest, 'vct.hg'))
    ui.setconfig('extensions', 'pushlog',
                 os.path.join(dest, 'vct.hg/hgext/pushlog'))

    srcdir = os.path.join(dest, 'test')
    destdir = os.path.join(dest, 'testwork')
    if not os.path.exists(srcdir):
        os.makedirs(srcdir)
    commands.init(ui, srcdir)
    commands.init(ui, destdir)

    repo = hg.repository(ui, destdir)
    myfile1 = os.path.join(destdir, 'myfile1')
    myfile2 = os.path.join(destdir, 'myfile2')
    for i in range(5):
        payload = str(i)
        for path in (myfile1, myfile2):
            with open(path, 'a') as fh:
                fh.write(payload)
        commands.commit(ui, repo, myfile1, myfile2,
                        message='message' + payload, user='******',
                        addremove=True)
        commands.push(ui, repo, dest=srcdir)
        # presumably to give each push a distinct (second-resolution)
        # timestamp in the pushlog -- TODO confirm
        time.sleep(1.01)
    return srcdir
def clone_repository(location, target):
    """Clone ``location`` into ``target`` without updating the working copy."""
    commands.clone(setup_ui(), location, target, noupdate=True)
def clone(self, dest):
    """Clone this object's repository into ``dest``."""
    source = self.repo
    commands.clone(self.ui, source, dest)
def clone(repo, from_source, to_source):
    """Clone ``from_source`` into ``to_source`` using ``repo``'s ui.

    On a repository error the traceback is printed and the process exits
    with a non-zero status.
    """
    try:
        commands.clone(repo.ui, from_source, to_source)
    except RepoError:
        # Fix: traceback.print_exc() takes a traceback *limit* as its first
        # argument, not an exception object -- passing the exception was an
        # API misuse.  Inside an except block the current exception is
        # printed automatically.
        traceback.print_exc()
        # Fix: exit non-zero so callers and shell scripts can detect the
        # failure (bare sys.exit() reports success with status 0).
        sys.exit(1)
def upload(self, trans, **kwd):
    """Handle the tool shed repository upload form.

    Accepts an uploaded file, a plain http(s) url, or an ``hg`` url
    (fetched with a mercurial clone); commits the new content to the
    repository and redirects to the repository browser on success.
    ``trans`` is the framework transaction; form fields arrive in ``kwd``.
    """
    params = util.Params(kwd)
    message = util.restore_text(params.get('message', ''))
    status = params.get('status', 'done')
    commit_message = util.restore_text(params.get('commit_message', 'Uploaded'))
    category_ids = util.listify(params.get('category_id', ''))
    categories = suc.get_categories(trans)
    repository_id = params.get('repository_id', '')
    repository = suc.get_repository_in_tool_shed(trans, repository_id)
    repo_dir = repository.repo_path(trans.app)
    repo = hg.repository(suc.get_configured_ui(), repo_dir)
    uncompress_file = util.string_as_bool(params.get('uncompress_file', 'true'))
    remove_repo_files_not_in_tar = util.string_as_bool(params.get('remove_repo_files_not_in_tar', 'true'))
    uploaded_file = None
    upload_point = self.__get_upload_point(repository, **kwd)
    # Remember the pre-upload tip so a no-op upload can be detected below.
    tip = repository.tip(trans.app)
    file_data = params.get('file_data', '')
    url = params.get('url', '')
    # Part of the upload process is sending email notification to those that have registered to
    # receive them.  One scenario occurs when the first change set is produced for the repository.
    # See the suc.handle_email_alerts() method for the definition of the scenarios.
    new_repo_alert = repository.is_new(trans.app)
    uploaded_directory = None
    if params.get('upload_button', False):
        if file_data == '' and url == '':
            message = 'No files were entered on the upload form.'
            status = 'error'
            uploaded_file = None
        elif url and url.startswith('hg'):
            # Use mercurial clone to fetch repository, contents will then be copied over.
            uploaded_directory = tempfile.mkdtemp()
            # Rewrites the scheme 'hg<rest>' to 'http<rest>' (e.g. 'hgs://' -> 'https://').
            repo_url = 'http%s' % url[len('hg'):]
            repo_url = repo_url.encode('ascii', 'replace')
            commands.clone(suc.get_configured_ui(), repo_url, uploaded_directory)
        elif url:
            # Plain http(s) url: stream the remote file into a temp file.
            valid_url = True
            try:
                stream = urllib.urlopen(url)
            except Exception, e:
                valid_url = False
                message = 'Error uploading file via http: %s' % str(e)
                status = 'error'
                uploaded_file = None
            if valid_url:
                fd, uploaded_file_name = tempfile.mkstemp()
                uploaded_file = open(uploaded_file_name, 'wb')
                while 1:
                    chunk = stream.read(CHUNK_SIZE)
                    if not chunk:
                        break
                    uploaded_file.write(chunk)
                uploaded_file.flush()
                uploaded_file_filename = url.split('/')[-1]
                isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
        elif file_data not in ('', None):
            # Browser file upload.
            uploaded_file = file_data.file
            uploaded_file_name = uploaded_file.name
            uploaded_file_filename = os.path.split(file_data.filename)[-1]
            isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
    if uploaded_file or uploaded_directory:
        ok = True
        isgzip = False
        isbz2 = False
        if uploaded_file:
            if uncompress_file:
                isgzip = checkers.is_gzip(uploaded_file_name)
                if not isgzip:
                    isbz2 = checkers.is_bz2(uploaded_file_name)
            if isempty:
                tar = None
                istar = False
            else:
                # Determine what we have - a single file or an archive
                try:
                    if (isgzip or isbz2) and uncompress_file:
                        # Open for reading with transparent compression.
                        tar = tarfile.open(uploaded_file_name, 'r:*')
                    else:
                        tar = tarfile.open(uploaded_file_name)
                    istar = True
                except tarfile.ReadError, e:
                    tar = None
                    istar = False
        else:
            # Uploaded directory
            istar = False
        if istar:
            ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                self.upload_tar(trans,
                                repository,
                                tar,
                                uploaded_file,
                                upload_point,
                                remove_repo_files_not_in_tar,
                                commit_message,
                                new_repo_alert)
        elif uploaded_directory:
            ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                self.upload_directory(trans,
                                      repository,
                                      uploaded_directory,
                                      upload_point,
                                      remove_repo_files_not_in_tar,
                                      commit_message,
                                      new_repo_alert)
        else:
            # Single (possibly compressed) file.
            if (isgzip or isbz2) and uncompress_file:
                uploaded_file_filename = self.uncompress(repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2)
            if upload_point is not None:
                full_path = os.path.abspath(os.path.join(repo_dir, upload_point, uploaded_file_filename))
            else:
                full_path = os.path.abspath(os.path.join(repo_dir, uploaded_file_filename))
            # Move the uploaded file to the load_point within the repository hierarchy.
            shutil.move(uploaded_file_name, full_path)
            # See if any admin users have chosen to receive email alerts when a repository is
            # updated.  If so, check every uploaded file to ensure content is appropriate.
            check_contents = suc.check_file_contents(trans)
            if check_contents and os.path.isfile(full_path):
                content_alert_str = self.__check_file_content(full_path)
            else:
                content_alert_str = ''
            commands.add(repo.ui, repo, full_path)
            # Convert from unicode to prevent "TypeError: array item must be char"
            full_path = full_path.encode('ascii', 'replace')
            commands.commit(repo.ui, repo, full_path, user=trans.user.username, message=commit_message)
            if full_path.endswith('tool_data_table_conf.xml.sample'):
                # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
                # to the in-memory trans.app.tool_data_tables dictionary.
                error, error_message = suc.handle_sample_tool_data_table_conf_file(trans.app, full_path)
                if error:
                    message = '%s<br/>%s' % (message, error_message)
            # See if the content of the change set was valid.
            admin_only = len(repository.downloadable_revisions) != 1
            suc.handle_email_alerts(trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only)
        if ok:
            # Update the repository files for browsing.
            suc.update_repository(repo)
            # Get the new repository tip.
            if tip == repository.tip(trans.app):
                message = 'No changes to repository. '
                status = 'warning'
            else:
                if (isgzip or isbz2) and uncompress_file:
                    uncompress_str = ' uncompressed and '
                else:
                    uncompress_str = ' '
                if uploaded_directory:
                    source_type = "repository"
                    source = url
                else:
                    source_type = "file"
                    source = uploaded_file_filename
                message = "The %s '%s' has been successfully%suploaded to the repository. " % (source_type, source, uncompress_str)
                if istar and (undesirable_dirs_removed or undesirable_files_removed):
                    items_removed = undesirable_dirs_removed + undesirable_files_removed
                    message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) were removed from the archive. " % items_removed
                if istar and remove_repo_files_not_in_tar and files_to_remove:
                    if upload_point is not None:
                        message += " %d files were removed from the repository relative to the selected upload point '%s'. " % (len(files_to_remove), upload_point)
                    else:
                        message += " %d files were removed from the repository root. " % len(files_to_remove)
                kwd['message'] = message
                suc.set_repository_metadata_due_to_new_tip(trans, repository, content_alert_str=content_alert_str, **kwd)
                # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to a requirement tag mismatch
                # or some other problem.
                if suc.get_config_from_disk('tool_dependencies.xml', repo_dir):
                    if repository.metadata_revisions:
                        # A repository's metadata revisions are order descending by update_time, so the zeroth revision will be the tip just after an upload.
                        metadata_dict = repository.metadata_revisions[0].metadata
                    else:
                        metadata_dict = {}
                    if suc.has_orphan_tool_dependencies_in_tool_shed(metadata_dict):
                        message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml file", '
                        message += 'so one or more of the defined tool dependencies are considered orphans within this repository.'
                        status = 'warning'
            # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
            suc.reset_tool_data_tables(trans.app)
            trans.response.send_redirect(web.url_for(controller='repository',
                                                     action='browse_repository',
                                                     id=repository_id,
                                                     commit_message='Deleted selected files',
                                                     message=message,
                                                     status=status))
        else:
            status = 'error'
        # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
        suc.reset_tool_data_tables(trans.app)
    # NOTE(review): the collapsed original does not show a form re-render for
    # the non-submit / error path here -- later variants of this method end
    # with trans.fill_template(...); confirm against the full file.
def upload(self, trans, **kwd):
    """Handle the tool shed repository upload form (later variant).

    Form values are read directly from ``kwd`` (no util.Params wrapper) and
    repository access goes through ``hg_util``.  Accepts an ``hg`` url
    (fetched via mercurial clone) or a plain http(s) url streamed to a
    temporary file.
    """
    message = escape(kwd.get('message', ''))
    status = kwd.get('status', 'done')
    commit_message = escape(kwd.get('commit_message', 'Uploaded'))
    category_ids = util.listify(kwd.get('category_id', ''))
    categories = suc.get_categories(trans.app)
    repository_id = kwd.get('repository_id', '')
    repository = suc.get_repository_in_tool_shed(trans.app, repository_id)
    repo_dir = repository.repo_path(trans.app)
    repo = hg_util.get_repo_for_repository(trans.app, repository=None, repo_path=repo_dir, create=False)
    uncompress_file = util.string_as_bool(kwd.get('uncompress_file', 'true'))
    remove_repo_files_not_in_tar = util.string_as_bool(kwd.get('remove_repo_files_not_in_tar', 'true'))
    uploaded_file = None
    upload_point = commit_util.get_upload_point(repository, **kwd)
    # Pre-upload tip; the sibling variants use it to detect a no-op upload.
    tip = repository.tip(trans.app)
    file_data = kwd.get('file_data', '')
    url = kwd.get('url', '')
    # Part of the upload process is sending email notification to those that have registered to
    # receive them.  One scenario occurs when the first change set is produced for the repository.
    # See the suc.handle_email_alerts() method for the definition of the scenarios.
    new_repo_alert = repository.is_new(trans.app)
    uploaded_directory = None
    if kwd.get('upload_button', False):
        if file_data == '' and url == '':
            message = 'No files were entered on the upload form.'
            status = 'error'
            uploaded_file = None
        elif url and url.startswith('hg'):
            # Use mercurial clone to fetch repository, contents will then be copied over.
            uploaded_directory = tempfile.mkdtemp()
            # Rewrites the scheme 'hg<rest>' to 'http<rest>' (e.g. 'hgs://' -> 'https://').
            repo_url = 'http%s' % url[len('hg'):]
            repo_url = repo_url.encode('ascii', 'replace')
            try:
                commands.clone(hg_util.get_configured_ui(), repo_url, uploaded_directory)
            except Exception, e:
                message = 'Error uploading via mercurial clone: %s' % basic_util.to_html_string(str(e))
                status = 'error'
                # Clean up the partially-populated clone target on failure.
                basic_util.remove_dir(uploaded_directory)
                uploaded_directory = None
        elif url:
            # Plain http(s) url: stream the remote file into a temp file.
            valid_url = True
            try:
                stream = urllib.urlopen(url)
            except Exception, e:
                valid_url = False
                message = 'Error uploading file via http: %s' % str(e)
                status = 'error'
                uploaded_file = None
            if valid_url:
                fd, uploaded_file_name = tempfile.mkstemp()
                uploaded_file = open(uploaded_file_name, 'wb')
                while 1:
                    chunk = stream.read(util.CHUNK_SIZE)
                    if not chunk:
                        break
                    uploaded_file.write(chunk)
                uploaded_file.flush()
                uploaded_file_filename = url.split('/')[-1]
                isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
    # NOTE(review): this method appears truncated in this chunk -- its
    # siblings continue with file_data handling, archive extraction and the
    # commit/redirect logic; confirm against the full file.
def upload(self, trans, **kwd):
    """Handle the tool shed repository upload form (newest variant).

    Accepts an uploaded file, an http(s) url (streamed with ``requests``),
    or an ``hg`` url (fetched via mercurial clone); validates
    repository-type restrictions, commits the content, and redirects to the
    repository browser on success.  Falls through to re-render the upload
    form otherwise.
    """
    message = escape(kwd.get('message', ''))
    status = kwd.get('status', 'done')
    commit_message = escape(kwd.get('commit_message', 'Uploaded'))
    repository_id = kwd.get('repository_id', '')
    repository = repository_util.get_repository_in_tool_shed(trans.app, repository_id)
    repo_dir = repository.repo_path(trans.app)
    repo = hg_util.get_repo_for_repository(trans.app, repository=None, repo_path=repo_dir, create=False)
    uncompress_file = util.string_as_bool(kwd.get('uncompress_file', 'true'))
    remove_repo_files_not_in_tar = util.string_as_bool(kwd.get('remove_repo_files_not_in_tar', 'true'))
    uploaded_file = None
    upload_point = commit_util.get_upload_point(repository, **kwd)
    # Remember the pre-upload tip so a no-op upload can be detected below.
    tip = repository.tip(trans.app)
    file_data = kwd.get('file_data', '')
    url = kwd.get('url', '')
    # Part of the upload process is sending email notification to those that have registered to
    # receive them.  One scenario occurs when the first change set is produced for the repository.
    # See the suc.handle_email_alerts() method for the definition of the scenarios.
    new_repo_alert = repository.is_new(trans.app)
    uploaded_directory = None
    if kwd.get('upload_button', False):
        if file_data == '' and url == '':
            message = 'No files were entered on the upload form.'
            status = 'error'
            uploaded_file = None
        elif url and url.startswith('hg'):
            # Use mercurial clone to fetch repository, contents will then be copied over.
            uploaded_directory = tempfile.mkdtemp()
            # Rewrites the scheme 'hg<rest>' to 'http<rest>' (e.g. 'hgs://' -> 'https://').
            repo_url = 'http%s' % url[len('hg'):]
            repo_url = repo_url.encode('ascii', 'replace')
            try:
                commands.clone(hg_util.get_configured_ui(), repo_url, uploaded_directory)
            except Exception as e:
                message = 'Error uploading via mercurial clone: %s' % basic_util.to_html_string(str(e))
                status = 'error'
                # Clean up the partially-populated clone target on failure.
                basic_util.remove_dir(uploaded_directory)
                uploaded_directory = None
        elif url:
            # Plain http(s) url: stream the remote file into a temp file.
            valid_url = True
            try:
                stream = requests.get(url, stream=True)
            except Exception as e:
                valid_url = False
                message = 'Error uploading file via http: %s' % str(e)
                status = 'error'
                uploaded_file = None
            if valid_url:
                fd, uploaded_file_name = tempfile.mkstemp()
                uploaded_file = open(uploaded_file_name, 'wb')
                for chunk in stream.iter_content(chunk_size=util.CHUNK_SIZE):
                    if chunk:
                        uploaded_file.write(chunk)
                uploaded_file.flush()
                uploaded_file_filename = url.split('/')[-1]
                isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
        elif file_data not in ('', None):
            # Browser file upload.
            uploaded_file = file_data.file
            uploaded_file_name = uploaded_file.name
            uploaded_file_filename = os.path.split(file_data.filename)[-1]
            isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
        if uploaded_file or uploaded_directory:
            rdah = attribute_handlers.RepositoryDependencyAttributeHandler(trans.app, unpopulate=False)
            tdah = attribute_handlers.ToolDependencyAttributeHandler(trans.app, unpopulate=False)
            tdtm = data_table_manager.ToolDataTableManager(trans.app)
            ok = True
            isgzip = False
            isbz2 = False
            if uploaded_file:
                if uncompress_file:
                    isgzip = checkers.is_gzip(uploaded_file_name)
                    if not isgzip:
                        isbz2 = checkers.is_bz2(uploaded_file_name)
                if isempty:
                    tar = None
                    istar = False
                else:
                    # Determine what we have - a single file or an archive
                    try:
                        if (isgzip or isbz2) and uncompress_file:
                            # Open for reading with transparent compression.
                            tar = tarfile.open(uploaded_file_name, 'r:*')
                        else:
                            tar = tarfile.open(uploaded_file_name)
                        istar = True
                    except tarfile.ReadError as e:
                        tar = None
                        istar = False
            else:
                # Uploaded directory
                istar = False
            if istar:
                ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                    repository_content_util.upload_tar(
                        trans,
                        rdah,
                        tdah,
                        repository,
                        tar,
                        uploaded_file,
                        upload_point,
                        remove_repo_files_not_in_tar,
                        commit_message,
                        new_repo_alert
                    )
            elif uploaded_directory:
                ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                    self.upload_directory(trans,
                                          rdah,
                                          tdah,
                                          repository,
                                          uploaded_directory,
                                          upload_point,
                                          remove_repo_files_not_in_tar,
                                          commit_message,
                                          new_repo_alert)
            else:
                # Single (possibly compressed) file.
                if (isgzip or isbz2) and uncompress_file:
                    uploaded_file_filename = commit_util.uncompress(repository,
                                                                    uploaded_file_name,
                                                                    uploaded_file_filename,
                                                                    isgzip=isgzip,
                                                                    isbz2=isbz2)
                # Specialized repository types may only contain their single
                # definition file.
                if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION and \
                        uploaded_file_filename != rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
                    ok = False
                    message = 'Repositories of type <b>Repository suite definition</b> can only contain a single file named '
                    message += '<b>repository_dependencies.xml</b>.'
                elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and \
                        uploaded_file_filename != rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
                    ok = False
                    message = 'Repositories of type <b>Tool dependency definition</b> can only contain a single file named '
                    message += '<b>tool_dependencies.xml</b>.'
                if ok:
                    if upload_point is not None:
                        full_path = os.path.abspath(os.path.join(repo_dir, upload_point, uploaded_file_filename))
                    else:
                        full_path = os.path.abspath(os.path.join(repo_dir, uploaded_file_filename))
                    # Move some version of the uploaded file to the load_point within the repository hierarchy.
                    if uploaded_file_filename in [rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME]:
                        # Inspect the contents of the file to see if toolshed or changeset_revision attributes
                        # are missing and if so, set them appropriately.
                        altered, root_elem, error_message = rdah.handle_tag_attributes(uploaded_file_name)
                        if error_message:
                            ok = False
                            message = error_message
                            status = 'error'
                        elif altered:
                            tmp_filename = xml_util.create_and_write_tmp_file(root_elem)
                            shutil.move(tmp_filename, full_path)
                        else:
                            shutil.move(uploaded_file_name, full_path)
                    elif uploaded_file_filename in [rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME]:
                        # Inspect the contents of the file to see if changeset_revision values are
                        # missing and if so, set them appropriately.
                        altered, root_elem, error_message = tdah.handle_tag_attributes(uploaded_file_name)
                        if error_message:
                            ok = False
                            message = error_message
                            status = 'error'
                        if ok:
                            if altered:
                                tmp_filename = xml_util.create_and_write_tmp_file(root_elem)
                                shutil.move(tmp_filename, full_path)
                            else:
                                shutil.move(uploaded_file_name, full_path)
                    else:
                        shutil.move(uploaded_file_name, full_path)
                    if ok:
                        # See if any admin users have chosen to receive email alerts when a repository is updated.
                        # If so, check every uploaded file to ensure content is appropriate.
                        check_contents = commit_util.check_file_contents_for_email_alerts(trans.app)
                        if check_contents and os.path.isfile(full_path):
                            content_alert_str = commit_util.check_file_content_for_html_and_images(full_path)
                        else:
                            content_alert_str = ''
                        hg_util.add_changeset(repo.ui, repo, full_path)
                        # Convert from unicode to prevent "TypeError: array item must be char"
                        full_path = full_path.encode('ascii', 'replace')
                        hg_util.commit_changeset(repo.ui,
                                                 repo,
                                                 full_path_to_changeset=full_path,
                                                 username=trans.user.username,
                                                 message=commit_message)
                        if full_path.endswith('tool_data_table_conf.xml.sample'):
                            # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
                            # by parsing the file and adding new entries to the in-memory trans.app.tool_data_tables
                            # dictionary.
                            error, error_message = tdtm.handle_sample_tool_data_table_conf_file(full_path, persist=False)
                            if error:
                                message = '%s<br/>%s' % (message, error_message)
                        # See if the content of the change set was valid.
                        admin_only = len(repository.downloadable_revisions) != 1
                        suc.handle_email_alerts(trans.app,
                                                trans.request.host,
                                                repository,
                                                content_alert_str=content_alert_str,
                                                new_repo_alert=new_repo_alert,
                                                admin_only=admin_only)
            if ok:
                # Update the repository files for browsing.
                hg_util.update_repository(repo)
                # Get the new repository tip.
                if tip == repository.tip(trans.app):
                    message = 'No changes to repository. '
                    status = 'warning'
                else:
                    if (isgzip or isbz2) and uncompress_file:
                        uncompress_str = ' uncompressed and '
                    else:
                        uncompress_str = ' '
                    if uploaded_directory:
                        source_type = "repository"
                        source = url
                    else:
                        source_type = "file"
                        source = uploaded_file_filename
                    message = "The %s <b>%s</b> has been successfully%suploaded to the repository. " % \
                        (source_type, escape(source), uncompress_str)
                    if istar and (undesirable_dirs_removed or undesirable_files_removed):
                        items_removed = undesirable_dirs_removed + undesirable_files_removed
                        message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) " % items_removed
                        message += "were removed from the archive. "
                    if istar and remove_repo_files_not_in_tar and files_to_remove:
                        if upload_point is not None:
                            message += " %d files were removed from the repository relative to the selected upload point '%s'. " % \
                                (len(files_to_remove), upload_point)
                        else:
                            message += " %d files were removed from the repository root. " % len(files_to_remove)
                    rmm = repository_metadata_manager.RepositoryMetadataManager(app=trans.app,
                                                                                user=trans.user,
                                                                                repository=repository)
                    status, error_message = \
                        rmm.set_repository_metadata_due_to_new_tip(trans.request.host,
                                                                   content_alert_str=content_alert_str,
                                                                   **kwd)
                    if error_message:
                        message = error_message
                    kwd['message'] = message
                if repository.metadata_revisions:
                    # A repository's metadata revisions are order descending by update_time, so the zeroth revision
                    # will be the tip just after an upload.
                    metadata_dict = repository.metadata_revisions[0].metadata
                else:
                    metadata_dict = {}
                dd = dependency_display.DependencyDisplayer(trans.app)
                if str(repository.type) not in [rt_util.REPOSITORY_SUITE_DEFINITION,
                                                rt_util.TOOL_DEPENDENCY_DEFINITION]:
                    change_repository_type_message = rt_util.generate_message_for_repository_type_change(trans.app,
                                                                                                         repository)
                    if change_repository_type_message:
                        message += change_repository_type_message
                        status = 'warning'
                    else:
                        # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies
                        # weren't loaded due to a requirement tag mismatch or some other problem.  Tool dependency
                        # definitions can define orphan tool dependencies (no relationship to any tools contained in the
                        # repository), so warning messages are important because orphans are always valid.  The repository
                        # owner must be warned in case they did not intend to define an orphan dependency, but simply
                        # provided incorrect information (tool shed, name owner, changeset_revision) for the definition.
                        orphan_message = dd.generate_message_for_orphan_tool_dependencies(repository, metadata_dict)
                        if orphan_message:
                            message += orphan_message
                            status = 'warning'
                # Handle messaging for invalid tool dependencies.
                invalid_tool_dependencies_message = dd.generate_message_for_invalid_tool_dependencies(metadata_dict)
                if invalid_tool_dependencies_message:
                    message += invalid_tool_dependencies_message
                    status = 'error'
                # Handle messaging for invalid repository dependencies.
                invalid_repository_dependencies_message = \
                    dd.generate_message_for_invalid_repository_dependencies(metadata_dict, error_from_tuple=True)
                if invalid_repository_dependencies_message:
                    message += invalid_repository_dependencies_message
                    status = 'error'
                # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
                tdtm.reset_tool_data_tables()
                if uploaded_directory:
                    basic_util.remove_dir(uploaded_directory)
                trans.response.send_redirect(web.url_for(controller='repository',
                                                         action='browse_repository',
                                                         id=repository_id,
                                                         commit_message='Deleted selected files',
                                                         message=message,
                                                         status=status))
            else:
                if uploaded_directory:
                    basic_util.remove_dir(uploaded_directory)
                status = 'error'
            # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
            tdtm.reset_tool_data_tables()
    return trans.fill_template('/webapps/tool_shed/repository/upload.mako',
                               repository=repository,
                               changeset_revision=tip,
                               url=url,
                               commit_message=commit_message,
                               uncompress_file=uncompress_file,
                               remove_repo_files_not_in_tar=remove_repo_files_not_in_tar,
                               message=message,
                               status=status)
def internal_push_over_svnserve(self, subdir='', commit=True):
    """Clone a branch over a local svnserve instance, commit via memctx and
    push back, then verify the pushed changeset.

    ``subdir`` selects a subpath of the served repository; when ``commit``
    is False the method stops after creating the local changeset so other
    tests can reuse it as extended setup.
    """
    repo_path = self.load_svndump('simple_branch.svndump')
    # Allow anonymous writes so the test can push without auth.
    open(os.path.join(repo_path, 'conf', 'svnserve.conf'),
         'w').write('[general]\nanon-access=write\n[sasl]\n')
    self.port = random.randint(socket.IPPORT_USERRESERVED, 65535)
    self.host = socket.gethostname()
    # The `svnserve` binary appears to use the obsolete `gethostbyname(3)`
    # function, which always returns an IPv4 address, even on hosts that
    # support and expect IPv6. As a workaround, resolve the hostname
    # within the test harness with `getaddrinfo(3)` to ensure that the
    # client and server both use the same IPv4 or IPv6 address.
    try:
        addrinfo = socket.getaddrinfo(self.host, self.port)
    except socket.gaierror as e:
        # gethostname() can give a hostname that doesn't
        # resolve. Seems bad, but let's fall back to `localhost` in
        # that case and hope for the best.
        self.host = 'localhost'
        addrinfo = socket.getaddrinfo(self.host, self.port)
    # On macOS svn seems to have issues with IPv6 at least some of
    # the time, so try and bias towards IPv4. This works because
    # AF_INET is less than AF_INET6 on all platforms I've
    # checked. Hopefully any platform where that's not true will
    # be fine with IPv6 all the time. :)
    selected = sorted(addrinfo)[0]
    self.host = selected[4][0]
    # If we're connecting via IPv6 we need to put brackets around the
    # hostname in the URL.
    ipv6 = selected[0] == socket.AF_INET6
    # Ditch any interface information since that's not helpful in
    # a URL
    if ipv6 and ':' in self.host and '%' in self.host:
        self.host = self.host.rsplit('%', 1)[0]
    urlfmt = 'svn://[%s]:%d/%s' if ipv6 else 'svn://%s:%d/%s'
    args = ['svnserve', '--daemon', '--foreground',
            '--listen-port=%d' % self.port,
            '--listen-host=%s' % self.host,
            '--root=%s' % repo_path]
    svnserve = subprocess.Popen(args, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
    self.svnserve_pid = svnserve.pid
    try:
        # Give svnserve a moment to start listening before cloning.
        time.sleep(2)
        import shutil
        shutil.rmtree(self.wc_path)
        commands.clone(self.ui(),
                       urlfmt % (self.host, self.port, subdir),
                       self.wc_path,
                       noupdate=True)
        repo = self.repo
        old_tip = revsymbol(repo, 'tip').node()
        expected_parent = revsymbol(repo, 'default').node()

        # In-memory file source for the memctx commit below; only
        # 'adding_file' is a valid path.
        def file_callback(repo, memctx, path):
            if path == 'adding_file':
                return compathacks.makememfilectx(repo,
                                                  memctx=memctx,
                                                  path=path,
                                                  data='foo',
                                                  islink=False,
                                                  isexec=False,
                                                  copied=False)
            raise IOError(errno.EINVAL, 'Invalid operation: ' + path)
        ctx = context.memctx(repo,
                             parents=(revsymbol(repo, 'default').node(), node.nullid),
                             text='automated test',
                             files=['adding_file'],
                             filectxfn=file_callback,
                             user='******',
                             date='2008-10-07 20:59:48 -0500',
                             extra={
                                 'branch': 'default',
                             })
        new_hash = repo.commitctx(ctx)
        if not commit:
            return  # some tests use this test as an extended setup.
        hg.update(repo, revsymbol(repo, 'tip').node())
        oldauthor = revsymbol(repo, 'tip').user()
        commands.push(repo.ui, repo)
        tip = revsymbol(self.repo, 'tip')
        self.assertNotEqual(oldauthor, tip.user())
        self.assertNotEqual(tip.node(), old_tip)
        self.assertEqual(tip.parents()[0].node(), expected_parent)
        self.assertEqual(tip['adding_file'].data(), 'foo')
        self.assertEqual(tip.branch(), 'default')
        # unintended behaviour:
        self.assertNotEqual('an_author', tip.user())
        self.assertEqual('(no author)', tip.user().rsplit('@', 1)[0])
    finally:
        # Popen.kill() only exists on Python >= 2.6.
        if sys.version_info >= (2, 6):
            svnserve.kill()
        else:
            test_util.kill_process(svnserve)