def test_update(self):
    """Verify that 'clone --updaterev' checks out the requested revision."""
    ui = self.ui()
    _dispatch(ui, ['init', self.wc_path])
    repo = self.repo
    repo.ui.setconfig('ui', 'username', 'anonymous')
    fobj = file(os.path.join(self.wc_path, 'it'), 'w')
    # First commit needs an explicit 'add'; later ones reuse the tracked file.
    fobj.write('C1')
    fobj.flush()
    commands.add(ui, repo)
    commands.commit(ui, repo, message="C1")
    for marker in ('C2', 'C3'):
        fobj.write(marker)
        fobj.flush()
        commands.commit(ui, repo, message=marker)
    self.assertEqual(len(repo), 3)
    updaterev = 1
    _dispatch(ui, ['clone', self.wc_path, self.wc_path + '2',
                   '--updaterev=%s' % updaterev])
    repo2 = hg.repository(ui, self.wc_path + '2')
    # The clone's working directory parent must be the requested revision.
    self.assertEqual(str(repo[updaterev]), str(repo2['.']))
def endWrite(self, withErrors):
    """Called when PUT has finished writing.

    See DAVResource.endWrite()
    """
    if withErrors:
        return
    # The upload succeeded, so register the new file with Mercurial.
    commands.add(self.provider.ui, self.provider.repo, self.localHgPath)
def writeIgnoreFile(self):
    """Write the edited ignore patterns back to the .hgignore file.

    If the file did not exist before, offer to add it to revision
    control after writing.  On I/O failure a warning dialog is shown.
    """
    # Pick the EOL convention recorded for this file (py2 pre-ternary idiom).
    eol = self.doseoln and '\r\n' or '\n'
    out = eol.join(self.ignorelines) + eol
    # Remember whether .hgignore existed before we (re)create it below.
    hasignore = os.path.exists(self.repo.vfs.join(self.ignorefile))
    try:
        # atomictempfile replaces the target atomically when closed.
        f = util.atomictempfile(self.ignorefile, 'wb', createmode=None)
        f.write(out)
        f.close()
        if not hasignore:
            ret = qtlib.QuestionMsgBox(
                _('New file created'),
                _('TortoiseHg has created a new '
                  '.hgignore file. Would you like to '
                  'add this file to the source code '
                  'control repository?'),
                parent=self)
            if ret:
                commands.add(hglib.loadui(), self.repo, self.ignorefile)
        # Tell the shell overlay and any listeners that the file changed.
        shlib.shell_notify([self.ignorefile])
        self.ignoreFilterUpdated.emit()
    except EnvironmentError, e:
        qtlib.WarningMsgBox(_('Unable to write .hgignore file'),
                            hglib.tounicode(str(e)), parent=self)
def test_reload_changectx_working_dir(self):
    """In REVISION_WORKING_DIR mode uncommitted changes are always visible.

    Every request should reflect the working directory state, with or
    without intervening add/commit operations.
    """
    app = create_app(repo_path=self.repo_path,
                     revision_id=REVISION_WORKING_DIR)
    client = app.test_client()
    # Untracked content is already served in working-dir mode.
    rv = client.get('/')
    self.assertTrue(b'post/lorem-ipsum' in rv.data)
    self.assertTrue(b'post/example-post' in rv.data)
    commands.add(self.ui, self.repo)
    rv = client.get('/')
    self.assertTrue(b'post/lorem-ipsum' in rv.data)
    self.assertTrue(b'post/example-post' in rv.data)
    commands.commit(self.ui, self.repo, message=b'foo', user=b'foo')
    rv = client.get('/')
    self.assertTrue(b'post/lorem-ipsum' in rv.data)
    self.assertTrue(b'post/example-post' in rv.data)
    # An uncommitted edit is visible immediately...
    with codecs.open(os.path.join(self.repo_path,
                                  app.config['CONTENT_DIR'], 'about.rst'),
                     'a', encoding='utf-8') as fp:
        fp.write('\n\nTHIS IS A TEST!\n')
    rv = client.get('/about/')
    self.assertTrue(b'THIS IS A TEST!' in rv.data)
    # ...and stays visible after committing it.
    commands.commit(self.ui, self.repo, message=b'foo', user=b'foo')
    rv = client.get('/about/')
    self.assertTrue(b'THIS IS A TEST!' in rv.data)
    with codecs.open(os.path.join(self.repo_path,
                                  app.config['CONTENT_DIR'], 'about.rst'),
                     'a', encoding='utf-8') as fp:
        fp.write('\n\nTHIS IS another TEST!\n')
    rv = client.get('/about/')
    self.assertTrue(b'THIS IS another TEST!' in rv.data)
    commands.commit(self.ui, self.repo, message=b'foo', user=b'foo')
    rv = client.get('/about/')
    self.assertTrue(b'THIS IS another TEST!' in rv.data)
def add(parent, ui, repo, files):
    """Add files, routing through the largefiles/kbfiles overrides when
    either extension is enabled; otherwise perform a plain add.

    Returns False when the user cancels the largefile prompt, True
    otherwise.
    """
    haslf = "largefiles" in repo.extensions()
    haskbf = "kbfiles" in repo.extensions()
    if haslf or haskbf:
        result = lfprompt.promptForLfiles(parent, ui, repo, files, haskbf)
        if not result:
            return False
        files, lfiles = result
        # Map extension name -> (override accessor, large-file keyword).
        overrides = {
            "largefiles": (lambda mod: mod.overrides.override_add,
                           {"large": True}),
            "kbfiles": (lambda mod: mod.bfsetup.override_add,
                        {"bf": True}),
        }
        for name, module in extensions.extensions():
            if name not in overrides:
                continue
            getter, lfkw = overrides[name]
            override_add = getter(module)
            if files:
                override_add(commands.add, ui, repo, *files)
            if lfiles:
                override_add(commands.add, ui, repo, *lfiles, **lfkw)
            return True
    commands.add(ui, repo, *files)
    return True
def commit_multi(proj, asset, filenames, text, username=None):
    """Commit multiple files to the repository and returns the revision id."""
    repo_path = os.path.join(G.REPOSITORY, proj)
    repo = repo_get(proj)
    if not isinstance(filenames, list):
        raise SPAMRepoError('expected a list of files for asset %s' % asset.id)
    # Prefix the commit message with the asset id and encode for hg.
    encodedtext = ('asset %s - %s' % (asset.id, text)).encode('utf-8')
    # '#' in the asset path is a placeholder for the 4-digit sequence number.
    sequence_path = asset.path.replace('#', '%04d')
    targets = []
    for seq, filename in enumerate(filenames, 1):
        uploaded = os.path.join(G.UPLOAD, filename)
        target_path = (sequence_path % seq).encode()
        target_repo_path = os.path.join(repo_path, target_path)
        parent_dir = os.path.dirname(target_repo_path)
        if not os.path.exists(parent_dir):
            os.makedirs(parent_dir)
        # Move the upload into place, then track it if it is new to hg.
        shutil.move(uploaded, target_repo_path)
        if target_path not in repo['tip']:
            commands.add(repo_ui, repo, target_repo_path)
        targets.append(target_path)
    # Commit exactly the files we just placed.
    matched = match.exact(repo.root, repo.getcwd(), targets)
    commit_id = repo.commit(encodedtext, user=username, match=matched)
    if commit_id:
        return repo[commit_id].hex()
    return None
def test_reload_changectx_working_dir(self):
    """In REVISION_WORKING_DIR mode uncommitted changes are always visible.

    Same flow as the byte-string variant of this test, using native
    strings for hg parameters and page markers.
    """
    app = create_app(repo_path=self.repo_path,
                     revision_id=REVISION_WORKING_DIR)
    client = app.test_client()
    # Untracked content is already served in working-dir mode.
    rv = client.get('/')
    self.assertTrue('post/lorem-ipsum' in rv.data)
    self.assertTrue('post/example-post' in rv.data)
    commands.add(self.ui, self.repo)
    rv = client.get('/')
    self.assertTrue('post/lorem-ipsum' in rv.data)
    self.assertTrue('post/example-post' in rv.data)
    commands.commit(self.ui, self.repo, message='foo', user='******')
    rv = client.get('/')
    self.assertTrue('post/lorem-ipsum' in rv.data)
    self.assertTrue('post/example-post' in rv.data)
    # An uncommitted edit is visible immediately...
    with codecs.open(os.path.join(self.repo_path,
                                  app.config['CONTENT_DIR'], 'about.rst'),
                     'a', encoding='utf-8') as fp:
        fp.write('\n\nTHIS IS A TEST!\n')
    rv = client.get('/about/')
    self.assertTrue('THIS IS A TEST!' in rv.data)
    # ...and stays visible after committing it.
    commands.commit(self.ui, self.repo, message='foo', user='******')
    rv = client.get('/about/')
    self.assertTrue('THIS IS A TEST!' in rv.data)
    with codecs.open(os.path.join(self.repo_path,
                                  app.config['CONTENT_DIR'], 'about.rst'),
                     'a', encoding='utf-8') as fp:
        fp.write('\n\nTHIS IS another TEST!\n')
    rv = client.get('/about/')
    self.assertTrue('THIS IS another TEST!' in rv.data)
    commands.commit(self.ui, self.repo, message='foo', user='******')
    rv = client.get('/about/')
    self.assertTrue('THIS IS another TEST!' in rv.data)
def test_branch(self):
    """Verify that 'clone --branch' checks out the requested branch."""
    ui = self.ui()
    _dispatch(ui, ['init', self.wc_path])
    repo = self.repo
    repo.ui.setconfig('ui', 'username', 'anonymous')
    fobj = file(os.path.join(self.wc_path, 'it'), 'w')
    # Build three commits on branches B1, default and B2.
    for content, label in (('C1', 'B1'), ('C2', 'default'), ('C3', 'B2')):
        fobj.write(content)
        fobj.flush()
        if content == 'C1':
            # Only the first revision needs the file added.
            commands.add(ui, repo)
        commands.branch(ui, repo, label=label)
        commands.commit(ui, repo, message=content)
    self.assertEqual(len(repo), 3)
    branch = 'B1'
    _dispatch(ui, ['clone', self.wc_path, self.wc_path + '2',
                   '--branch', branch])
    repo2 = hg.repository(ui, self.wc_path + '2')
    # The clone's checkout must sit on the head of the requested branch.
    self.assertEqual(repo[branch].hex(), repo2['.'].hex())
def test_update(self):
    """Verify that 'clone --updaterev' checks out the requested revision."""
    ui = self.ui()
    _dispatch(ui, ['init', '--quiet', self.wc_path])
    repo = self.repo
    repo.ui.setconfig('ui', 'username', 'anonymous')
    fobj = file(os.path.join(self.wc_path, 'it'), 'w')
    # First commit needs an explicit 'add'; later ones reuse the tracked file.
    fobj.write('C1')
    fobj.flush()
    commands.add(ui, repo)
    commands.commit(ui, repo, message="C1")
    for marker in ('C2', 'C3'):
        fobj.write(marker)
        fobj.flush()
        commands.commit(ui, repo, message=marker)
    self.assertEqual(test_util.repolen(repo), 3)
    updaterev = 1
    _dispatch(ui, [
        'clone', '--quiet', self.wc_path, self.wc_path + '2',
        '--updaterev=%s' % updaterev
    ])
    repo2 = hg.repository(ui, self.wc_path + '2')
    # The clone's working directory parent must be the requested revision.
    self.assertEqual(str(repo[updaterev]), str(repo2['.']))
def store_results(self, results): base_fn = "results_%s.cpkl" % self.name fn = self.repo.pathto(base_fn) cPickle.dump(results, open(fn, "w")) if base_fn not in self.repo['tip'].manifest(): commands.add(self.ui, self.repo, fn) message = "Committing results from current run" commands.commit(self.ui, self.repo, fn, message=message) print "Committed"
def create_test_changesets(self, repo, count=1, dates=None):
    """Create *count* commits of random files in *repo*.

    :param repo: wrapper object exposing get_ui()/get_repo()
    :param count: number of changesets to create
    :param dates: optional list of commit dates; commit i uses dates[i]
                  when present, otherwise hg's default date.
    """
    # Use None instead of a mutable default list (shared-state pitfall).
    if dates is None:
        dates = []
    for i in range(count):
        filename = repo_test_utils._create_random_file(self.directory)
        commands.add(repo.get_ui(), repo.get_repo(), filename)
        date = dates[i] if i < len(dates) else None
        commands.commit(repo.get_ui(), repo.get_repo(), date=date,
                        message="creating test commit", user='******')
def run_try(ui, repo, *args, **opts):
    """Push the current head to try.

    Builds a 'try:' syntax line from the -b/-p/-u/-t options, records it
    in a temporary empty commit, pushes to the 'try' path, then strips
    the temporary commit again.
    """
    if not opts['build'] or not opts['platform']:
        raise util.Abort('Both -b and -p are required')
    # We rely on the try server to validate anything beyond that simple
    # check above, so let's just blindly go about our business!
    tryopts = []
    tryopts.append('-b')
    tryopts.extend(opts['build'])
    tryopts.append('-p')
    tryopts.extend(opts['platform'])
    if opts.get('unit'):
        tryopts.append('-u')
        tryopts.extend(opts['unit'])
    if opts.get('talos'):
        tryopts.append('-t')
        tryopts.extend(opts['talos'])
    trymsg = 'try: %s' % (' '.join(tryopts),)
    if repo[None].dirty():
        raise util.Abort('You have outstanding changes')
    try:
        strip = extensions.find('strip')
    except KeyError:
        ui.warn('strip extension not found, use the following syntax:\n')
        ui.write('%s\n' % (trymsg,))
        return
    ui.write('setting try selections...\n')
    # This next bit here is a hack to get an empty commit: add a junk
    # file, commit, remove it, and amend the message to the try syntax.
    cwd = os.getcwd()
    junkfile = tempfile.mktemp(prefix='hgjunk', dir='')
    os.chdir(repo.root)
    try:
        file(junkfile, 'w').close()
        commands.add(ui, repo, junkfile)
        commands.commit(ui, repo, message='add junk file (will be gone)')
        commands.remove(ui, repo, junkfile)
        commands.commit(ui, repo, amend=True, message=trymsg, logfile=None)
    finally:
        # FIX: restore the caller's working directory even when one of
        # the commit steps aborts (the original left cwd changed).
        os.chdir(cwd)
    # Get the revision of our try commit so we can strip it later
    node = repo[None].p1().hex()
    ui.write('pushing to try...\n')
    commands.push(ui, repo, 'try', force=True)
    # Now we must clean up after ourslves by stripping the try commit
    strip.stripcmd(ui, repo, node, rev=[], no_backup=True)
def hg_add(self, single=None):
    """Adds all files to Mercurial when the --watch options is passed

    This only happens one time. All consequent files are not auto added
    to the watch list."""
    repo = hg.repository(ui.ui(), self.path)
    if single is None:
        # No specific file given: add everything in the repository.
        commands.add(ui.ui(), repo=repo)
    else:
        commands.add(ui.ui(), repo, single)
    hg_log.debug('added files to repo %s' % self.path)
def commit_results(self, msg_id, submission_tuple, results):
    """ INTERNAL: Commit the results of a submission to the local repo.

    results is indexed by change kind: 0=added, 1=?, 2=removed,
    3=failures(?), 4=forked files -- assumption inferred from usage
    below; TODO confirm against the producer of this tuple.
    Raises NoChangesError when there is nothing to commit.
    """
    print "RESULTS: ", results
    # Bail out when only index 3 has entries and nothing was actually
    # added/removed/forked.
    if len(results[3]) > 0 and sum([len(results[index])
                                    for index in (0, 1, 2, 4)]) == 0:
        # HACK, fix order!
        raise NoChangesError()
    assert sum([len(results[index]) for index in (0, 1, 2, 4)]) > 0
    wikitext_dir = os.path.join(self.full_base_path(), 'wikitext')
    raised = True
    # grrr pylint gives spurious
    #pylint: disable-msg=E1101
    # Capture hg output for the whole operation; popped in 'finally'.
    self.ui_.pushbuffer()
    try:
        # hg add new files.
        for name in results[0]:
            full_path = os.path.join(wikitext_dir, name)
            commands.add(self.ui_, self.repo, full_path)
        # hg add fork files
        for name in results[4]:
            full_path = os.path.join(wikitext_dir, name)
            commands.add(self.ui_, self.repo, full_path)
        # hg remove removed files.
        for name in results[2]:
            full_path = os.path.join(wikitext_dir, name)
            commands.remove(self.ui_, self.repo, full_path)
        # Writes to/prunes special file used to generate RemoteChanges.
        self.update_change_log(msg_id, submission_tuple, results, True)
        # REDFLAG: LATER, STAKING? later allow third field for staker.
        # fms_id|chk
        commit_msg = "%s|%s" % (submission_tuple[0], submission_tuple[3])
        # hg commit
        commands.commit(self.ui_, self.repo, logfile=None,
                        addremove=None, user=None, date=None,
                        message=commit_msg)
        self.fixup_accepted_log()  # Fix version in accepted.txt
        self.notify_committed(True)
        raised = False
    finally:
        # Always drain the buffered hg output; log it only on failure.
        text = self.ui_.popbuffer()
        if raised:
            self.logger.debug("commit_results -- popped log:\n%s" % text)
def setUp(self):
    """Create a throwaway hg repository populated with sample content.

    Layout: content/page-{0..2}.rst, content/about.rst (with aliases),
    content/post/post-{0..2}.rst and content/post/foo.rst (with manual
    tags).  Everything is added and committed in a single changeset.
    """
    self.repo_path = mkdtemp()
    self.ui = ui.ui()
    self.ui.setconfig('ui', 'username', 'foo <*****@*****.**>')
    self.ui.setconfig('ui', 'quiet', True)
    commands.init(self.ui, self.repo_path)
    self.repo = hg.repository(self.ui, self.repo_path)
    file_dir = os.path.join(self.repo_path, 'content')
    if not os.path.isdir(file_dir):
        os.makedirs(file_dir)
    # Three plain sample pages.
    for i in range(3):
        file_path = os.path.join(file_dir, 'page-%i.rst' % i)
        with codecs.open(file_path, 'w', encoding='utf-8') as fp:
            fp.write(SAMPLE_PAGE)
        commands.add(self.ui, self.repo, file_path)
    # A page carrying redirect aliases.
    file_path = os.path.join(file_dir, 'about.rst')
    with codecs.open(file_path, 'w', encoding='utf-8') as fp:
        fp.write(SAMPLE_PAGE + """
.. aliases: 301:/my-old-post-location/,/another-old-location/""")
    commands.add(self.ui, self.repo, file_path)
    file_dir = os.path.join(self.repo_path, 'content', 'post')
    if not os.path.isdir(file_dir):
        os.makedirs(file_dir)
    # Three sample posts.
    for i in range(3):
        file_path = os.path.join(file_dir, 'post-%i.rst' % i)
        with codecs.open(file_path, 'w', encoding='utf-8') as fp:
            fp.write(SAMPLE_POST)
        commands.add(self.ui, self.repo, file_path)
    file_path = os.path.join(file_dir, 'foo.rst')
    with codecs.open(file_path, 'w', encoding='utf-8') as fp:
        # using the page template, because we want to set tags manually
        fp.write(SAMPLE_PAGE + """
.. tags: foo, bar, lol""")
    commands.add(self.ui, self.repo, file_path)
    commands.commit(self.ui, self.repo, message='foo', user='******')
def createCollection(self, name):
    """Create a new collection as member of self.

    A dummy member is created, because Mercurial doesn't handle folders.
    """
    assert self.isCollection
    self._checkWriteAccess()
    os.mkdir(self._getFilePath(name))
    # Mercurial only tracks files, so drop a marker file into the new
    # directory and register it with the repository.
    marker = self._getFilePath(name, ".directory")
    handle = open(marker, "w")
    handle.write("Created by WsgiDAV.")
    handle.close()
    commands.add(self.provider.ui, self.provider.repo, marker)
def createEmptyResource(self, name):
    """Create and return an empty (length-0) resource as member of self.

    See DAVResource.createEmptyResource()
    """
    assert self.isCollection
    self._checkWriteAccess()
    filepath = self._getFilePath(name)
    # Touch an empty file, then register it with Mercurial.
    open(filepath, "w").close()
    commands.add(self.provider.ui, self.provider.repo, filepath)
    # getResourceInst() won't work, because the cached manifest is outdated
    child_path = self.path.rstrip("/") + "/" + name
    return HgResource(child_path, False, self.environ, self.rev,
                      self.localHgPath + "/" + name)
def test_reload_changectx_default(self):
    """In REVISION_DEFAULT mode only committed content is served.

    Uncommitted adds/edits must stay invisible until they are committed.
    """
    app = create_app(repo_path=self.repo_path, autoinit=False)
    # Start from a state where the content dir is tracked but forgotten.
    commands.add(self.ui, self.repo)
    commands.forget(self.ui, self.repo,
                    os.path.join(self.repo_path, app.config['CONTENT_DIR']))
    commands.commit(self.ui, self.repo, message='foo', user='******')
    app.blohg.init_repo(REVISION_DEFAULT)
    client = app.test_client()
    rv = client.get('/')
    self.assertFalse('post/lorem-ipsum' in rv.data)
    self.assertFalse('post/example-post' in rv.data)
    # Adding alone is not enough; content shows up only after commit.
    commands.add(self.ui, self.repo)
    rv = client.get('/')
    self.assertFalse('post/lorem-ipsum' in rv.data)
    self.assertFalse('post/example-post' in rv.data)
    commands.commit(self.ui, self.repo, message='foo', user='******')
    rv = client.get('/')
    self.assertTrue('post/lorem-ipsum' in rv.data)
    self.assertTrue('post/example-post' in rv.data)
    # Same rule for edits: invisible until committed.
    with codecs.open(os.path.join(self.repo_path,
                                  app.config['CONTENT_DIR'], 'about.rst'),
                     'a', encoding='utf-8') as fp:
        fp.write('\n\nTHIS IS A TEST!\n')
    rv = client.get('/about/')
    self.assertFalse('THIS IS A TEST!' in rv.data)
    commands.commit(self.ui, self.repo, message='foo', user='******')
    rv = client.get('/about/')
    self.assertTrue('THIS IS A TEST!' in rv.data)
    with codecs.open(os.path.join(self.repo_path,
                                  app.config['CONTENT_DIR'], 'about.rst'),
                     'a', encoding='utf-8') as fp:
        fp.write('\n\nTHIS IS another TEST!\n')
    rv = client.get('/about/')
    self.assertTrue('THIS IS A TEST!' in rv.data)
    self.assertFalse('THIS IS another TEST!' in rv.data)
    commands.commit(self.ui, self.repo, message='foo', user='******')
    rv = client.get('/about/')
    self.assertTrue('THIS IS A TEST!' in rv.data)
    self.assertTrue('THIS IS another TEST!' in rv.data)
def test_reload_changectx_default(self):
    """In REVISION_DEFAULT mode only committed content is served.

    Byte-string variant: hg parameters are bytes and page markers are
    matched against the raw response body.
    """
    app = create_app(repo_path=self.repo_path, autoinit=False)
    # Start from a state where the content dir is tracked but forgotten.
    commands.add(self.ui, self.repo)
    commands.forget(self.ui, self.repo,
                    u2hg(os.path.join(self.repo_path,
                                      app.config['CONTENT_DIR'])))
    commands.commit(self.ui, self.repo, message=b'foo', user=b'foo')
    app.blohg.init_repo(REVISION_DEFAULT)
    client = app.test_client()
    rv = client.get('/')
    self.assertFalse(b'post/lorem-ipsum' in rv.data)
    self.assertFalse(b'post/example-post' in rv.data)
    # Adding alone is not enough; content shows up only after commit.
    commands.add(self.ui, self.repo)
    rv = client.get('/')
    self.assertFalse(b'post/lorem-ipsum' in rv.data)
    self.assertFalse(b'post/example-post' in rv.data)
    commands.commit(self.ui, self.repo, message=b'foo', user=b'foo')
    rv = client.get('/')
    self.assertTrue(b'post/lorem-ipsum' in rv.data)
    self.assertTrue(b'post/example-post' in rv.data)
    # Same rule for edits: invisible until committed.
    with codecs.open(os.path.join(self.repo_path,
                                  app.config['CONTENT_DIR'], 'about.rst'),
                     'a', encoding='utf-8') as fp:
        fp.write('\n\nTHIS IS A TEST!\n')
    rv = client.get('/about/')
    self.assertFalse(b'THIS IS A TEST!' in rv.data)
    commands.commit(self.ui, self.repo, message=b'foo', user=b'foo')
    rv = client.get('/about/')
    self.assertTrue(b'THIS IS A TEST!' in rv.data)
    with codecs.open(os.path.join(self.repo_path,
                                  app.config['CONTENT_DIR'], 'about.rst'),
                     'a', encoding='utf-8') as fp:
        fp.write('\n\nTHIS IS another TEST!\n')
    rv = client.get('/about/')
    self.assertTrue(b'THIS IS A TEST!' in rv.data)
    self.assertFalse(b'THIS IS another TEST!' in rv.data)
    commands.commit(self.ui, self.repo, message=b'foo', user=b'foo')
    rv = client.get('/about/')
    self.assertTrue(b'THIS IS A TEST!' in rv.data)
    self.assertTrue(b'THIS IS another TEST!' in rv.data)
def repo_init(proj):
    """Init a new mercurial repository for ``proj``.

    Creates the repository if missing, seeds a default .hgignore that
    hides the preview cache, and commits the ignore file on first use.
    """
    repo_path = os.path.join(G.REPOSITORY, proj)
    try:
        repo = repo_get(proj)
    except SPAMRepoNotFound:
        commands.init(repo_ui, repo_path)
        repo = repo_get(proj)
    hgignore_path = os.path.join(G.REPOSITORY, proj, '.hgignore')
    if not os.path.exists(hgignore_path):
        # 'with' guarantees the handle is closed even if a write fails
        # (the original relied on an explicit close only).
        with open(hgignore_path, 'w') as hgignore:
            hgignore.write('syntax: regexp\n')
            hgignore.write('^.previews/')
    if '.hgignore' not in repo['tip']:
        commands.add(repo_ui, repo, hgignore_path)
    # Commit only the ignore file, regardless of other pending changes.
    matched = match.exact(repo.root, repo.getcwd(), ['.hgignore'])
    commit_id = repo.commit('add .hgignore', user='******', match=matched)
def addPath(self, path, rules=None):
    """Add *path* to the repository, honouring include/exclude rules.

    :param path: filesystem path to add
    :param rules: sequence of (include?, regex-pattern) pairs; each
        pattern is handed to Mercurial as an ``re:`` matcher.
    """
    # None instead of a mutable default list (shared-state pitfall).
    if rules is None:
        rules = []
    try:
        if not opath.exists(path):
            error("Path", path, "does not exist")
    except TypeError:
        # opath.exists() raised because path was not string-like.
        error(path, "is not a path name")
    include = []
    exclude = []
    # The explicit '!= []' guard was redundant: looping an empty
    # sequence is a no-op.
    for inclQ, patt in rules:
        if inclQ:
            include.append("re:" + patt)
        else:
            exclude.append("re:" + patt)
    commands.add(self.ui, self.repo, path, include=include, exclude=exclude)
def add_file(self, path):
    """Stage and commit *path* in the underlying VCS (git or hg)."""
    if self.vcs_type == 'git':
        # git wants a relative path
        path = path[len(self.path) + 1:]
        self.r.stage(path.encode('utf-8'))
        # FIXME: does not work if there was an
        # issue with other uncommitted things
        self.r.do_commit(
            message='commit {0}'.format(path.encode('utf-8')))
    elif self.vcs_type == 'hg':
        #_lock = self.r.lock()
        # NOTE(review): leftover debug output below -- consider removing.
        print '=' * 35
        print self.r.root
        print path
        print '=' * 35
        # NOTE(review): mercurial's 'hg' module exposes no add()/commit()
        # functions; presumably 'hg' is aliased to something else here --
        # verify against this module's imports.
        hg.add(self.ui, self.r, path.encode('utf-8'))
        hg.commit(
            self.ui, self.r, path.encode('utf-8'),
            message='commit {0}'.format(path))
def commit(self, key, data):
    """commit changed ``data`` to the entity identified by ``key``.

    Creates the parent directory on demand (retrying once), then adds
    and commits the file through Mercurial.
    """
    target = os.path.join(self.location, key)
    try:
        fobj = open(target, 'w')
    except IOError:
        # parent directory seems to be missing: create it and retry once
        os.makedirs(os.path.dirname(target))
        return self.commit(key, data)
    # Close the handle even if the write fails (original left it open).
    try:
        fobj.write(data)
    finally:
        fobj.close()
    u = self.hg_ui
    repo = hg.repository(u, self.location)
    # The original wrapped this in a no-op ``try: ... except: raise``;
    # errors from add() simply propagate either way.
    commands.add(u, repo, target)
    commands.commit(u, repo, message='auto commit from django')
def writeIgnoreFile(self):
    """Write the edited ignore patterns back to the .hgignore file.

    If the file did not exist before, offer to add it to revision
    control after writing.  On I/O failure a warning dialog is shown.
    """
    # Pick the EOL convention recorded for this file (py2 pre-ternary idiom).
    eol = self.doseoln and '\r\n' or '\n'
    out = eol.join(self.ignorelines) + eol
    # Remember whether .hgignore existed before we (re)create it below.
    hasignore = os.path.exists(self.repo.join(self.ignorefile))
    try:
        # atomictempfile replaces the target atomically when closed.
        f = util.atomictempfile(self.ignorefile, 'wb', createmode=None)
        f.write(out)
        f.close()
        if not hasignore:
            ret = qtlib.QuestionMsgBox(
                _('New file created'),
                _('TortoiseHg has created a new '
                  '.hgignore file. Would you like to '
                  'add this file to the source code '
                  'control repository?'),
                parent=self)
            if ret:
                commands.add(ui.ui(), self.repo, self.ignorefile)
        # Tell the shell overlay and any listeners that the file changed.
        shlib.shell_notify([self.ignorefile])
        self.ignoreFilterUpdated.emit()
    except EnvironmentError, e:
        qtlib.WarningMsgBox(_('Unable to write .hgignore file'),
                            hglib.tounicode(str(e)), parent=self)
def openRepo(self): # Create a new repository or continue from aborted dump self.ui=ui.ui() self.last_names = {} # Tracks page renames: name atm -> last name in repo self.last_parents = {} # Tracks page parent names: name atm -> last parent in repo if os.path.isfile(self.path+'\\.wstate'): print "Continuing from aborted dump state..." self.loadState() self.repo = hg.repository(self.ui, self.path) else: # create a new repository (will fail if one exists) print "Initializing repository..." commands.init(self.ui, self.path) self.repo = hg.repository(self.ui, self.path) self.rev_no = 0 if self.storeRevIds: # Add revision id file to the new repo fname = self.path+'\\.revid' codecs.open(fname, "w", "UTF-8").close() commands.add(self.ui, self.repo, str(fname))
def commit_revision(self, repo, raw_files, msg='no comment'): # DCI: Assert working dir is tip? manifest = repo['tip'].manifest() for fname, raw_bytes in raw_files: full_path = os.path.join(repo.root, fname) dname = os.path.dirname(full_path) if dname and not os.path.exists(dname): print "CREATED: ", dname os.makedirs(dname) out_file = open(full_path, 'wb') try: out_file.write(raw_bytes) finally: out_file.close() if not fname in manifest: commands.add(self.ui_, repo, full_path) commands.commit(self.ui_, repo, logfile=None, addremove=None, user=None, date=None, message=msg)
def commit(self, key, data):
    """commit changed ``data`` to the entity identified by ``key``.

    Creates the parent directory on demand (retrying once), then adds
    and commits the file through Mercurial.
    """
    target = os.path.join(self.location, key)
    try:
        fobj = open(target, 'w')
    except IOError:
        # BUG FIX: the original called os.dirname(), which does not
        # exist; the parent directory must come from os.path.dirname().
        os.makedirs(os.path.dirname(target))
        return self.commit(key, data)
    # Close the handle even if the write fails (original left it open).
    try:
        fobj.write(data)
    finally:
        fobj.close()
    u = self.hg_ui
    repo = hg.repository(u, self.location)
    # The original wrapped this in a no-op ``try: ... except: raise``;
    # errors from add() simply propagate either way.
    commands.add(u, repo, target)
    commands.commit(u, repo, message='auto commit from django')
def addPath(self, path, rules=None):
    """Add *path* to the repository, honouring include/exclude rules.

    :param path: filesystem path to add
    :param rules: sequence of (include?, regex-pattern) pairs; each
        pattern is handed to Mercurial as an ``re:`` matcher.
    """
    # None instead of a mutable default list (shared-state pitfall).
    if rules is None:
        rules = []
    try:
        if not opath.exists(path):
            error("Path", path, "does not exist")
    except TypeError:
        # opath.exists() raised because path was not string-like.
        error(path, "is not a path name")
    include = []
    exclude = []
    # The explicit '!= []' guard was redundant: looping an empty
    # sequence is a no-op.
    for inclQ, patt in rules:
        if inclQ:
            include.append("re:" + patt)
        else:
            exclude.append("re:" + patt)
    commands.add(self.ui, self.repo, path, include=include, exclude=exclude)
def test_branch(self):
    """Verify that 'clone --branch' checks out the requested branch."""
    ui = self.ui()
    _dispatch(ui, ['init', '--quiet', self.wc_path])
    repo = self.repo
    repo.ui.setconfig('ui', 'username', 'anonymous')
    fobj = file(os.path.join(self.wc_path, 'it'), 'w')
    # Build three commits on branches B1, default and B2.
    for content, label in (('C1', 'B1'), ('C2', 'default'), ('C3', 'B2')):
        fobj.write(content)
        fobj.flush()
        if content == 'C1':
            # Only the first revision needs the file added.
            commands.add(ui, repo)
        commands.branch(ui, repo, label=label)
        commands.commit(ui, repo, message=content)
    self.assertEqual(test_util.repolen(repo), 3)
    branch = 'B1'
    _dispatch(ui, [
        'clone', '--quiet', self.wc_path, self.wc_path + '2',
        '--branch', branch
    ])
    repo2 = hg.repository(ui, self.wc_path + '2')
    # The clone's checkout must sit on the head of the requested branch.
    self.assertEqual(
        revsymbol(repo, branch).hex(), revsymbol(repo2, '.').hex())
def openRepo(self):
    """Open the dump repository, resuming an aborted dump when a
    .wstate file is present, or creating a fresh repository otherwise.
    """
    # Create a new repository or continue from aborted dump
    self.ui = ui.ui()
    self.last_names = {
    }  # Tracks page renames: name atm -> last name in repo
    self.last_parents = {
    }  # Tracks page parent names: name atm -> last parent in repo
    # NOTE: paths are built with '\\' separators -- Windows-only layout.
    if os.path.isfile(self.path + '\\.wstate'):
        print "Continuing from aborted dump state..."
        self.loadState()
        self.repo = hg.repository(self.ui, self.path)
    else:
        # create a new repository (will fail if one exists)
        print "Initializing repository..."
        commands.init(self.ui, self.path)
        self.repo = hg.repository(self.ui, self.path)
        self.rev_no = 0
        if self.storeRevIds:
            # Add revision id file to the new repo
            fname = self.path + '\\.revid'
            codecs.open(fname, "w", "UTF-8").close()
            commands.add(self.ui, self.repo, str(fname))
def add(parent, ui, repo, files):
    """Add *files*, prompting the user about large files when the
    largefiles extension is active.

    Returns False if the user cancels the prompt, True otherwise.
    """
    if 'largefiles' not in repo.extensions():
        # Extension disabled: plain add, nothing to ask.
        commands.add(ui, repo, *files)
        return True
    result = lfprompt.promptForLfiles(parent, ui, repo, files)
    if not result:
        return False
    files, lfiles = result
    if files:
        commands.add(ui, repo, normal=True, *files)
    if lfiles:
        commands.add(ui, repo, lfsize='', normal=False, large=True, *lfiles)
    return True
def add(self, paths=()):
    """Schedule the given paths (resolved via self.joined) for addition."""
    resolved = self.joined(paths)
    commands.add(self.ui, self.repo, *resolved)
def commitNext(self):
    """Commit the next pending Wikidot revision into the hg repository.

    Returns True when a revision was committed, False when the revision
    queue is exhausted.  Tracks renames and parent-page changes across
    revisions via self.last_names / self.last_parents.
    """
    if self.rev_no >= len(self.wrevs):
        return False
    rev = self.wrevs[self.rev_no]
    source = self.wd.get_revision_source(rev['rev_id'])
    # Page title and unix_name changes are only available through another request:
    details = self.wd.get_revision_version(rev['rev_id'])
    # Store revision_id for last commit
    # Without this, empty commits (e.g. file uploads) will be skipped by Mercurial
    if self.storeRevIds:
        fname = self.path + '\\.revid'
        outp = codecs.open(fname, "w", "UTF-8")
        # rev_ids are unique amongst all pages, and only one page changes
        # in each commit anyway
        outp.write(rev['rev_id'])
        outp.close()
    unixname = rev['page_name']
    rev_unixname = details['unixname']  # may be different in revision than atm
    # Unfortunately, there's no exposed way in Wikidot to see page
    # breadcrumbs at any point in history.  The only way to know they were
    # changed is revision comments, though evil people may trick us.
    if rev['comment'].startswith('Parent page set to: "'):
        # This is a parenting revision, remember the new parent
        parent_unixname = rev['comment'][21:-2]
        self.last_parents[unixname] = parent_unixname
    else:
        # Else use last parent_unixname we've recorded
        parent_unixname = self.last_parents[
            unixname] if unixname in self.last_parents else None
        # There are also problems when parent page gets renamed -- see updateChildren
    # If the page is tracked and its name just changed, tell HG
    rename = (unixname in self.last_names) and (
        self.last_names[unixname] <> rev_unixname)
    if rename:
        # Update children which reference us -- see comments there
        self.updateChildren(self.last_names[unixname], rev_unixname)
        commands.rename(
            self.ui, self.repo,
            self.path + '\\' + str(self.last_names[unixname]) + '.txt',
            self.path + '\\' + str(rev_unixname) + '.txt')
    # Ouput contents
    fname = self.path + '\\' + rev_unixname + '.txt'
    outp = codecs.open(fname, "w", "UTF-8")
    if details['title']:
        outp.write('title:' + details['title'] + '\n')
    if parent_unixname:
        outp.write('parent:' + parent_unixname + '\n')
    outp.write(source)
    outp.close()
    # Add new page
    if not unixname in self.last_names:  # never before seen
        commands.add(self.ui, self.repo, str(fname))
    self.last_names[unixname] = rev_unixname
    # Commit
    if rev['comment'] <> '':
        commit_msg = rev_unixname + ': ' + rev['comment']
    else:
        commit_msg = rev_unixname
    if rev['date']:
        commit_date = str(rev['date']) + ' 0'
    else:
        commit_date = None
    print "Commiting: " + str(self.rev_no) + '. ' + commit_msg
    commands.commit(self.ui, self.repo, message=commit_msg,
                    user=rev['user'], date=commit_date)
    self.rev_no += 1
    self.saveState()  # Update operation state
    return True
def file(cls, repo, path, contents):
    """Write *contents* to *path* inside *repo* and add it if untracked.

    NOTE: the name shadows the ``file`` builtin; kept for API
    compatibility with existing callers.
    """
    p = repo.pathto(path)
    # Close the handle deterministically; the original leaked the
    # anonymous open(p, 'w') object.
    with open(p, 'w') as fobj:
        fobj.write(contents)
    # 'a'dded / 'm'odified / 'n'ormal dirstate codes mean the file is
    # already tracked; anything else needs an explicit add.
    if repo.dirstate[path] not in 'amn':
        hgcommands.add(repo.ui, repo, 'path:' + path)
def _track(ui, repo, path):
    """Adds new files to Mercurial."""
    if not os.path.exists(path):
        return
    # Swallow the command's output so tracking stays silent.
    ui.pushbuffer()
    commands.add(ui, repo, path)
    ui.popbuffer()
def add_latest_rev_and_toolshed(repo, **kwargs):
    """
        Iterating over all, but the ignored mercurial files.
        If a file is called tool_dependencies.xml or repository_dependencies.xml
        we check if 'changeset_revision' and/or 'toolshed' is not set or empty
        and insert the latest revision of the corresponding repo
        (repo-name/owner/tooshed).
        The default tool_shed url is hardcoded and can be changed.

        This hook creates a backup of the original file, replaces revision
        number and toolshed and commit the adopted changes.
        To restore the backup files use the additional script
        (toolshed_pretxncommit_hook.py) as pretxncommit-hook.

        Add the following to your .hgrc:
        [hooks]
        pre-commit = python:.hg/toolshed_pre-commit_hook.py:add_latest_rev_and_toolshed
    """
    toolshed_url = "http://testtoolshed.g2.bx.psu.edu/"
    # to force a commit the user can add a temporary file called:
    # force_pre-commit_hook_temp_file
    # we will forget that file, because it should only force the execution
    # of that function
    commands.forget(ui.ui(), repo, 'force_pre-commit_hook_temp_file')
    logging.info('Emtering pre-commit Hook: Updating "toolshed" and/or "changeset_revision" attribute.')
    # repo.status(clean=True) returns per-category filename lists; flatten.
    filename_categories = repo.status(clean=True)
    filepaths = [item for sublist in filename_categories for item in sublist]
    backup_files = list()
    for filepath in filepaths:
        if os.path.split(filepath)[-1] in ['tool_dependencies.xml',
                                           'repository_dependencies.xml']:
            tree = ET.parse(filepath, parser=CommentedTreeBuilder())
            root = tree.getroot()
            change = False
            for repo_dep in root.iter('repository'):
                # Fill in a missing changeset_revision with the repo tip.
                if repo_dep.attrib.get('changeset_revision', '') == '':
                    logging.info('Change *changeset_revision* of [%s]\n in file: %s\n and repository: %s' % ('%s :: %s' % (repo_dep.attrib['owner'], repo_dep.attrib['name']), filepath, repo.url()))
                    tip = get_latest_repo_rev('%srepos/%s/%s' % (toolshed_url, repo_dep.attrib['owner'], repo_dep.attrib['name']))
                    repo_dep.attrib.update({'changeset_revision': "%s" % tip})
                    change = True
                # Fill in a missing toolshed attribute with the default shed.
                if repo_dep.attrib.get('toolshed', '') == '':
                    logging.info('Change *toolshed* of [%s]\n in file: %s\n and repository: %s' % ('%s :: %s' % (repo_dep.attrib['owner'], repo_dep.attrib['name']), filepath, repo.url()))
                    repo_dep.attrib.update({'toolshed': "http://testtoolshed.g2.bx.psu.edu/"})
                    change = True
            if change:
                # Back up the original, rewrite the XML in place and stage it.
                backup_filepath = '%s.pre-commit-backup' % filepath
                backup_files.append(backup_filepath)
                shutil.move(filepath, backup_filepath)
                tree.write(filepath, xml_declaration=True, encoding='utf-8')
                logging.info('Add %s to repository: %s' % (filepath, repo.url()))
                commands.add(ui.ui(), repo, filepath)
    # check if there is anything to commit
    if not [diff for diff in patch.diff(repo)]:
        logging.info('Nothing to commit for repository: %s.' % repo.url())
        # if nothing to commit, restore the original files
        # these is necessary because I could not find a 'nothing to commit'-hook
        for backup_file in backup_files:
            if os.path.split(backup_file)[-1] in [
                    'tool_dependencies.xml.pre-commit-backup',
                    'repository_dependencies.xml.pre-commit-backup']:
                ori_filepath = backup_file.replace('.pre-commit-backup', '')
                if os.path.split(ori_filepath)[-1] in [
                        'tool_dependencies.xml',
                        'repository_dependencies.xml']:
                    os.remove(ori_filepath)
                    shutil.move(backup_file, ori_filepath)
        # abort the commit, because nothing is to commit
        sys.exit(1)
def add_latest_rev_and_toolshed(repo, **kwargs):
    """
    Iterating over all, but the ignored mercurial files.
    If a file is called tool_dependencies.xml or repository_dependencies.xml we
    check if 'changeset_revision' and/or 'toolshed' is not set or empty and
    insert the latest revision of the corresponding repo (repo-name/owner/toolshed).
    The default tool_shed url is hardcoded and can be changed.

    This hook creates a backup of the original file, replaces revision number
    and toolshed and commits the adopted changes.  To restore the backup files
    use the additional script (toolshed_pretxncommit_hook.py) as
    pretxncommit-hook.

    Add the following to your .hgrc:
    [hooks]
    pre-commit = python:.hg/toolshed_pre-commit_hook.py:add_latest_rev_and_toolshed
    """
    toolshed_url = "http://testtoolshed.g2.bx.psu.edu/"
    # To force a commit the user can add a temporary file called
    # force_pre-commit_hook_temp_file; forget it again, because it exists only
    # to force the execution of this hook.
    commands.forget(ui.ui(), repo, 'force_pre-commit_hook_temp_file')
    # Typo fix: the message previously read 'Emtering'.
    logging.info(
        'Entering pre-commit Hook: Updating "toolshed" and/or "changeset_revision" attribute.'
    )
    # repo.status() yields several file lists (modified, added, ..., clean);
    # flatten them into one list of candidate paths.
    filename_categories = repo.status(clean=True)
    filepaths = [item for sublist in filename_categories for item in sublist]
    backup_files = list()
    for filepath in filepaths:
        if os.path.split(filepath)[-1] in [
                'tool_dependencies.xml', 'repository_dependencies.xml'
        ]:
            tree = ET.parse(filepath, parser=CommentedTreeBuilder())
            root = tree.getroot()
            change = False
            for repo_dep in root.iter('repository'):
                # Fill in a missing/empty changeset_revision with the remote tip.
                if repo_dep.attrib.get('changeset_revision', '') == '':
                    logging.info(
                        'Change *changeset_revision* of [%s]\n in file: %s\n and repository: %s'
                        % ('%s :: %s' % (repo_dep.attrib['owner'],
                                         repo_dep.attrib['name']), filepath,
                           repo.url()))
                    tip = get_latest_repo_rev(
                        '%srepos/%s/%s' % (toolshed_url,
                                           repo_dep.attrib['owner'],
                                           repo_dep.attrib['name']))
                    repo_dep.attrib.update({'changeset_revision': "%s" % tip})
                    change = True
                # Fill in a missing/empty toolshed attribute.
                if repo_dep.attrib.get('toolshed', '') == '':
                    logging.info(
                        'Change *toolshed* of [%s]\n in file: %s\n and repository: %s'
                        % ('%s :: %s' % (repo_dep.attrib['owner'],
                                         repo_dep.attrib['name']), filepath,
                           repo.url()))
                    # Consistency fix: reuse toolshed_url (same value) instead
                    # of repeating the literal.
                    repo_dep.attrib.update({'toolshed': toolshed_url})
                    change = True
            if change:
                # Keep a backup of the untouched file so the pretxncommit hook
                # can restore it, then rewrite the XML and hg-add the result.
                backup_filepath = '%s.pre-commit-backup' % filepath
                backup_files.append(backup_filepath)
                shutil.move(filepath, backup_filepath)
                tree.write(filepath, xml_declaration=True, encoding='utf-8')
                logging.info('Add %s to repository: %s' % (filepath, repo.url()))
                commands.add(ui.ui(), repo, filepath)
    # check if there is anything to commit
    if not [diff for diff in patch.diff(repo)]:
        logging.info('Nothing to commit for repository: %s.' % repo.url())
        # if nothing to commit, restore the original files
        # this is necessary because there is no 'nothing to commit'-hook
        for backup_file in backup_files:
            if os.path.split(backup_file)[-1] in [
                    'tool_dependencies.xml.pre-commit-backup',
                    'repository_dependencies.xml.pre-commit-backup'
            ]:
                ori_filepath = backup_file.replace('.pre-commit-backup', '')
                if os.path.split(ori_filepath)[-1] in [
                        'tool_dependencies.xml', 'repository_dependencies.xml'
                ]:
                    os.remove(ori_filepath)
                shutil.move(backup_file, ori_filepath)
        # abort the commit, because nothing is to commit
        sys.exit(1)
# NOTE(review): this chunk is a fragment — the enclosing function (an archive
# upload helper that returns (ok, message, files_to_remove, content_alert_str,
# undesirable_dirs_removed, undesirable_files_removed)) begins before this
# view; the indentation below is reconstructed — confirm against the full file.
        elif os.path.isfile( absolute_selected_file ):
            os.remove( absolute_selected_file )
            # Prune the containing directory if removing the file emptied it.
            dir = os.path.split( absolute_selected_file )[0]
            try:
                os.rmdir( dir )
            except OSError, e:
                # The directory is not empty.
                pass
        # See if any admin users have chosen to receive email alerts when a repository is updated.  If so, check every uploaded file to ensure
        # content is appropriate.
        check_contents = check_file_contents_for_email_alerts( trans )
        for filename_in_archive in filenames_in_archive:
            # Check file content to ensure it is appropriate.
            if check_contents and os.path.isfile( filename_in_archive ):
                content_alert_str += check_file_content_for_html_and_images( filename_in_archive )
            # Schedule every extracted file for addition to the hg repository.
            commands.add( repo.ui, repo, filename_in_archive )
            if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
                # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
                # to the in-memory trans.app.tool_data_tables dictionary.
                error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
                if error:
                    # Abort early: report the parse error to the caller.
                    return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
        # Commit everything that was added above in a single changeset.
        commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
        # admin_only is False only for the very first downloadable revision.
        admin_only = len( repository.downloadable_revisions ) != 1
        suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
        return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed

def handle_missing_repository_attribute( elem ):
    # <repository name="molecule_datatypes" owner="test" />
    # NOTE(review): this definition is cut off at the end of the chunk.
    error_message = ''
    name = elem.get( 'name' )
def add_changeset( repo_ui, repo, path_to_filename_in_archive ):
    """Schedule the given archive file for addition to the hg repository.

    The path is coerced to ``str`` because ``commands.add`` expects a plain
    string rather than a unicode object.
    """
    target = str( path_to_filename_in_archive )
    commands.add( repo_ui, repo, target )
def add_changeset(repo_ui, repo, path_to_filename_in_archive):
    """Register *path_to_filename_in_archive* with mercurial via ``hg add``.

    ``commands.add`` requires a plain ``str`` path, hence the conversion.
    """
    commands.add(
        repo_ui,
        repo,
        str(path_to_filename_in_archive),
    )
def hgflow_func_init(self, args, opts):
    """Interactively initialise hg-flow for this repository.

    Prompts for branch names and prefixes, writes them to the .hgflow config
    file, commits it, and creates the publish/develop branches if missing.
    NOTE(review): indentation reconstructed from a collapsed source line —
    confirm against the original file.
    """
    # Already initialised: nothing to do.
    if self.inited:
        self.output(_('Your workspace is already inited, use `hg flow check` for detail hg flow information'), '\n')
        return
    branches = self._getBranches()
    ctx = self.repo[None]
    current_branch = str(ctx.branch())
    if len(branches) > 1:
        #more than one brnach, give a warn
        self.output(_('You have more than one branches:'), '\n')
        for b in branches:
            self.output(b, '\t')
            if b != current_branch:
                self.output('(inactive)')
            self.outputln()
        # Empty answer defaults to "yes".
        text = self.input(_('You want to continue flow init? [y] '))
        if not ('' == text or 'y' == text or 'yes' == text):
            return
    # Refuse to init on a dirty working copy.
    if self._hasUncommit():
        return
    # Defaults, each overridable by the prompts below.
    publish_branch = 'default'
    develop_branch = 'develop'
    feature_branch_prefix = 'feature/'
    release_branch_prefix = 'release/'
    hotfix_branch_prefix = 'hotfix/'
    version_tag_prefix = 'release_'
    text = self.input(_('Branch name for production release : [default] '))
    if text:
        publish_branch = text
    text = self.input(_('Branch name for "next release" development : [develop] '))
    if text:
        develop_branch = text
    self.output('\n', _('How to name your supporting branch prefixes?'), '\n');
    text = self.input(_('Feature branches? [feature/] '))
    if text:
        feature_branch_prefix = text
    text = self.input(_('Release branches? [release/] '))
    if text:
        release_branch_prefix = text
    text = self.input(_('Hotfix branches? [hotfix/] '))
    if text:
        hotfix_branch_prefix = text
    text = self.input(_('Version tag prefix? [release_] '))
    if text:
        version_tag_prefix = text
    #check existing branch
    # Persist the answers into the .hgflow config file.
    import ConfigParser
    config = ConfigParser.RawConfigParser()
    config.add_section(BASIC_SECTION)
    config.set(BASIC_SECTION, SECNAME_PUBLISH_BRANCH, publish_branch)
    config.set(BASIC_SECTION, SECNAME_DEVELOP_BRANCH, develop_branch)
    config.set(BASIC_SECTION, SECNAME_FEATURE_PREFIX, feature_branch_prefix)
    config.set(BASIC_SECTION, SECNAME_RELEASE_PREFIX, release_branch_prefix)
    config.set(BASIC_SECTION, SECNAME_HOTFIX_PREFIX, hotfix_branch_prefix)
    config.set(BASIC_SECTION, SECNAME_VERSION_TAG_PREFIX, version_tag_prefix)
    configfile = open(self.cfgFile, 'wb')
    config.write(configfile)
    configfile.close()
    # Track and commit the new config file.
    commands.add(self.ui, self.repo, self.cfgFile)
    commands.commit(self.ui, self.repo, self.cfgFile, message="hg flow init, add .hgflow file")
    if not publish_branch in branches:
        #create publish_branch
        self._createBranch(publish_branch, 'hg flow init, add branch %s' % (publish_branch, ))
    if not develop_branch in branches:
        self._createBranch(develop_branch, 'hg flow init, add branch %s' % (develop_branch, ))
    # Leave the working copy on the development branch.
    commands.update(self.ui, self.repo, develop_branch)
def hgflow_func_init(self, args, opts):
    """Interactively initialise hg-flow for this repository.

    Variant with an empty default version-tag prefix and no final branch
    switch (compare the sibling implementation that defaults to 'release_').
    NOTE(review): indentation reconstructed from a collapsed source line —
    confirm against the original file.
    """
    # Already initialised: nothing to do.
    if self.inited:
        self.output(_('Your workspace is already inited, use `hg flow check` for detail hg flow information'), '\n')
        return
    branches = self._getBranches()
    ctx = self.repo[None]
    current_branch = str(ctx.branch())
    if len(branches) > 1:
        #more than one brnach, give a warn
        self.output(_('You have more than one branches:'), '\n')
        for b in branches:
            self.output(b, '\t')
            if b != current_branch:
                self.output('(inactive)')
            self.outputln()
        # Empty answer defaults to "yes".
        text = self.input(_('You want to continue flow init? [y] '))
        if not ('' == text or 'y' == text or 'yes' == text):
            return
    # Refuse to init on a dirty working copy.
    if self._hasUncommit():
        return
    # Defaults, each overridable by the prompts below.
    publish_branch = 'default'
    develop_branch = 'develop'
    feature_branch_prefix = 'feature/'
    release_branch_prefix = 'release/'
    hotfix_branch_prefix = 'hotfix/'
    version_tag_prefix = ''
    text = self.input(_('Branch name for production release : [default] '))
    if text:
        publish_branch = text
    text = self.input(_('Branch name for "next release" development : [develop] '))
    if text:
        develop_branch = text
    self.output('\n', _('How to name your supporting branch prefixes?'), '\n');
    text = self.input(_('Feature branches? [feature/] '))
    if text:
        feature_branch_prefix = text
    text = self.input(_('Release branches? [release/] '))
    if text:
        release_branch_prefix = text
    text = self.input(_('Hotfix branches? [hotfix/] '))
    if text:
        hotfix_branch_prefix = text
    text = self.input(_('Version tag prefix? [] '))
    if text:
        version_tag_prefix = text
    #check existing branch
    # Persist the answers into the .hgflow config file.
    import ConfigParser
    config = ConfigParser.RawConfigParser()
    config.add_section(BASIC_SECTION)
    config.set(BASIC_SECTION, SECNAME_PUBLISH_BRANCH, publish_branch)
    config.set(BASIC_SECTION, SECNAME_DEVELOP_BRANCH, develop_branch)
    config.set(BASIC_SECTION, SECNAME_FEATURE_PREFIX, feature_branch_prefix)
    config.set(BASIC_SECTION, SECNAME_RELEASE_PREFIX, release_branch_prefix)
    config.set(BASIC_SECTION, SECNAME_HOTFIX_PREFIX, hotfix_branch_prefix)
    config.set(BASIC_SECTION, SECNAME_VERSION_TAG_PREFIX, version_tag_prefix)
    configfile = open(self.cfgFile, 'wb')
    config.write(configfile)
    configfile.close()
    # Track and commit the new config file.
    commands.add(self.ui, self.repo, self.cfgFile)
    commands.commit(self.ui, self.repo, self.cfgFile, message="hg flow init, add .hgflow file")
    if not publish_branch in branches:
        #create publish_branch
        self._createBranch(publish_branch, 'hg flow init, add branch %s' % (publish_branch, ))
    if not develop_branch in branches:
        self._createBranch(develop_branch, 'hg flow init, add branch %s' % (develop_branch, ))
""" print_(*args, **kwargs) sys.stdout.flush() u = uimod.ui.load() print('% creating repo') repo = localrepo.localrepository(u, b'.', create=True) f = open('test.py', 'w') try: f.write('foo\n') finally: f.close print('% add and commit') commands.add(u, repo, b'test.py') commands.commit(u, repo, message=b'*') commands.status(u, repo, clean=True) print('% change') f = open('test.py', 'w') try: f.write('bar\n') finally: f.close() # this would return clean instead of changed before the fix commands.status(u, repo, clean=True, modified=True)
def addSubrepo(self):
    'menu action handler for adding a new subrepository'
    # Ask the user for an existing repository directory to register as a
    # subrepo of the currently selected repository (root).
    root = hglib.tounicode(self.selitem.internalPointer().rootpath())
    caption = _('Select an existing repository to add as a subrepo')
    FD = QFileDialog
    path = unicode(FD.getExistingDirectory(caption=caption,
        directory=root, options=FD.ShowDirsOnly | FD.ReadOnly))
    if path:
        # Normalize both paths for reliable prefix/equality comparisons.
        path = os.path.normcase(os.path.normpath(path))
        sroot = paths.find_root(path)
        root = os.path.normcase(os.path.normpath(root))
        # Validation chain: each failure warns and bails out.
        if not sroot:
            qtlib.WarningMsgBox(_('Cannot add subrepository'),
                _('%s is not a valid repository') % path,
                parent=self)
            return
        elif not os.path.isdir(sroot):
            qtlib.WarningMsgBox(_('Cannot add subrepository'),
                _('"%s" is not a folder') % sroot,
                parent=self)
            return
        elif sroot == root:
            qtlib.WarningMsgBox(_('Cannot add subrepository'),
                _('A repository cannot be added as a subrepo of itself'),
                parent=self)
            return
        elif root != paths.find_root(os.path.dirname(path)):
            # NOTE(review): this message interpolates ``root``; ``path`` may
            # have been intended — confirm.
            qtlib.WarningMsgBox(_('Cannot add subrepository'),
                _('The selected folder:<br><br>%s<br><br>'
                'is not inside the target repository.<br><br>'
                'This may be allowed but is greatly discouraged.<br>'
                'If you want to add a non trivial subrepository mapping '
                'you must manually edit the <i>.hgsub</i> file') % root, parent=self)
            return
        else:
            # The selected path is the root of a repository that is inside
            # the selected repository

            # Use forward slashes for relative subrepo root paths
            srelroot = sroot[len(root)+1:]
            srelroot = util.pconvert(srelroot)

            # Is is already on the selected repository substate list?
            try:
                repo = hg.repository(ui.ui(), hglib.fromunicode(root))
            except:
                qtlib.WarningMsgBox(_('Cannot open repository'),
                    _('The selected repository:<br><br>%s<br><br>'
                    'cannot be open!') % root, parent=self)
                return

            if hglib.fromunicode(srelroot) in repo['.'].substate:
                qtlib.WarningMsgBox(_('Subrepository already exists'),
                    _('The selected repository:<br><br>%s<br><br>'
                    'is already a subrepository of:<br><br>%s<br><br>'
                    'as: "%s"') % (sroot, root, srelroot), parent=self)
                return
            else:
                # Read the current .hgsub file contents
                lines = []
                hasHgsub = os.path.exists(repo.wjoin('.hgsub'))
                if hasHgsub:
                    try:
                        fsub = repo.wopener('.hgsub', 'r')
                        lines = fsub.readlines()
                        fsub.close()
                    except:
                        qtlib.WarningMsgBox(
                            _('Failed to add subrepository'),
                            _('Cannot open the .hgsub file in:<br><br>%s') \
                            % root, parent=self)
                        return

                # Make sure that the selected subrepo (or one of its
                # subrepos!) is not already on the .hgsub file
                linesep = ''
                for line in lines:
                    line = hglib.tounicode(line)
                    spath = line.split("=")[0].strip()
                    if not spath:
                        continue
                    # Remember the line separator of the first non-empty line
                    # so new entries match the file's existing EOL style.
                    if not linesep:
                        linesep = hglib.getLineSeparator(line)
                    spath = util.pconvert(spath)
                    if line.startswith(srelroot):
                        qtlib.WarningMsgBox(
                            _('Failed to add repository'),
                            _('The .hgsub file already contains the '
                            'line:<br><br>%s') % line, parent=self)
                        return

                # Append the new subrepo to the end of the .hgsub file
                lines.append(hglib.fromunicode('%s = %s' % (srelroot, srelroot)))
                lines = [line.strip(linesep) for line in lines]

                # and update the .hgsub file
                try:
                    fsub = repo.wopener('.hgsub', 'w')
                    fsub.write(linesep.join(lines))
                    fsub.close()
                    # A brand-new .hgsub must itself be added to the repo.
                    if not hasHgsub:
                        commands.add(ui.ui(), repo, repo.wjoin('.hgsub'))
                    qtlib.InfoMsgBox(
                        _('Subrepo added to .hgsub file'),
                        _('The selected subrepo:<br><br><i>%s</i><br><br>'
                        'has been added to the .hgsub file of the repository:<br><br><i>%s</i><br><br>'
                        'Remember that in order to finish adding the '
                        'subrepo <i>you must still <u>commit</u></i> the '
                        'changes to the .hgsub file in order to confirm '
                        'the addition of the subrepo.') \
                        % (srelroot, root), parent=self)
                except:
                    qtlib.WarningMsgBox(
                        _('Failed to add repository'),
                        _('Cannot update the .hgsub file in:<br><br>%s') \
                        % root, parent=self)
                return
        # Fallback warning; NOTE(review): the branches above all return, so
        # this appears unreachable — confirm intended placement.
        qtlib.WarningMsgBox(_('Failed to add repository'),
            _('"%s" is not a valid repository inside "%s"') % \
            (path, root), parent=self)
        return
def upload(self, trans, **kwd):
    """Web controller action: upload a file (or archive) into a tool shed
    repository, commit it, send email alerts, and reset repository metadata.

    NOTE(review): indentation reconstructed from a collapsed source line, and
    the chunk may be truncated after the final ``status = "error"`` — confirm
    against the original file.
    """
    params = util.Params(kwd)
    message = util.restore_text(params.get("message", ""))
    status = params.get("status", "done")
    commit_message = util.restore_text(params.get("commit_message", "Uploaded"))
    category_ids = util.listify(params.get("category_id", ""))
    categories = get_categories(trans)
    repository_id = params.get("repository_id", "")
    repository = get_repository(trans, repository_id)
    repo_dir = repository.repo_path
    repo = hg.repository(get_configured_ui(), repo_dir)
    uncompress_file = util.string_as_bool(params.get("uncompress_file", "true"))
    remove_repo_files_not_in_tar = util.string_as_bool(params.get("remove_repo_files_not_in_tar", "true"))
    uploaded_file = None
    upload_point = self.__get_upload_point(repository, **kwd)
    # Remember the current tip so we can tell later whether anything changed.
    tip = repository.tip
    file_data = params.get("file_data", "")
    url = params.get("url", "")
    # Part of the upload process is sending email notification to those that have registered to
    # receive them.  One scenario occurs when the first change set is produced for the repository.
    # See the handle_email_alerts() method for the definition of the scenarios.
    new_repo_alert = repository.is_new
    if params.get("upload_button", False):
        current_working_dir = os.getcwd()
        if file_data == "" and url == "":
            message = "No files were entered on the upload form."
            status = "error"
            uploaded_file = None
        elif url:
            # Download the file at the given URL into a temp file.
            valid_url = True
            try:
                stream = urllib.urlopen(url)
            except Exception, e:
                valid_url = False
                message = "Error uploading file via http: %s" % str(e)
                status = "error"
                uploaded_file = None
            if valid_url:
                fd, uploaded_file_name = tempfile.mkstemp()
                uploaded_file = open(uploaded_file_name, "wb")
                while 1:
                    chunk = stream.read(CHUNK_SIZE)
                    if not chunk:
                        break
                    uploaded_file.write(chunk)
                uploaded_file.flush()
                uploaded_file_filename = url.split("/")[-1]
                isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
        elif file_data not in ("", None):
            # Browser form upload.
            uploaded_file = file_data.file
            uploaded_file_name = uploaded_file.name
            uploaded_file_filename = file_data.filename
            isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
        if uploaded_file:
            isgzip = False
            isbz2 = False
            if uncompress_file:
                isgzip = is_gzip(uploaded_file_name)
                if not isgzip:
                    isbz2 = is_bz2(uploaded_file_name)
            ok = True
            if isempty:
                tar = None
                istar = False
            else:
                # Determine what we have - a single file or an archive
                try:
                    if (isgzip or isbz2) and uncompress_file:
                        # Open for reading with transparent compression.
                        tar = tarfile.open(uploaded_file_name, "r:*")
                    else:
                        tar = tarfile.open(uploaded_file_name)
                    istar = True
                except tarfile.ReadError, e:
                    tar = None
                    istar = False
            if istar:
                # Archive: delegate extraction/commit to upload_tar().
                ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = self.upload_tar(
                    trans,
                    repository,
                    tar,
                    uploaded_file,
                    upload_point,
                    remove_repo_files_not_in_tar,
                    commit_message,
                    new_repo_alert,
                )
            else:
                # Single file: uncompress if needed, move into the repo, then
                # hg add + commit it.
                if (isgzip or isbz2) and uncompress_file:
                    uploaded_file_filename = self.uncompress(
                        repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2
                    )
                if upload_point is not None:
                    full_path = os.path.abspath(os.path.join(repo_dir, upload_point, uploaded_file_filename))
                else:
                    full_path = os.path.abspath(os.path.join(repo_dir, uploaded_file_filename))
                # Move the uploaded file to the load_point within the repository hierarchy.
                shutil.move(uploaded_file_name, full_path)
                # See if any admin users have chosen to receive email alerts when a repository is
                # updated.  If so, check every uploaded file to ensure content is appropriate.
                check_contents = check_file_contents(trans)
                if check_contents and os.path.isfile(full_path):
                    content_alert_str = self.__check_file_content(full_path)
                else:
                    content_alert_str = ""
                commands.add(repo.ui, repo, full_path)
                # Convert from unicode to prevent "TypeError: array item must be char"
                full_path = full_path.encode("ascii", "replace")
                commands.commit(repo.ui, repo, full_path, user=trans.user.username, message=commit_message)
                if full_path.endswith("tool_data_table_conf.xml.sample"):
                    # Handle the special case where a tool_data_table_conf.xml.sample
                    # file is being uploaded by parsing the file and adding new entries
                    # to the in-memory trans.app.tool_data_tables dictionary as well as
                    # appending them to the shed's tool_data_table_conf.xml file on disk.
                    error, error_message = handle_sample_tool_data_table_conf_file(trans.app, full_path)
                    if error:
                        message = "%s<br/>%s" % (message, error_message)
                if full_path.endswith(".loc.sample"):
                    # Handle the special case where a xxx.loc.sample file is being uploaded by copying it to ~/tool-data/xxx.loc.
                    copy_sample_file(trans.app, full_path)
                # See if the content of the change set was valid.
                admin_only = len(repository.downloadable_revisions) != 1
                handle_email_alerts(
                    trans,
                    repository,
                    content_alert_str=content_alert_str,
                    new_repo_alert=new_repo_alert,
                    admin_only=admin_only,
                )
            if ok:
                # Update the repository files for browsing.
                update_repository(repo)
                # Get the new repository tip.
                if tip != repository.tip:
                    # Something was committed: build a status message.
                    if (isgzip or isbz2) and uncompress_file:
                        uncompress_str = " uncompressed and "
                    else:
                        uncompress_str = " "
                    message = "The file '%s' has been successfully%suploaded to the repository." % (
                        uploaded_file_filename,
                        uncompress_str,
                    )
                    if istar and (undesirable_dirs_removed or undesirable_files_removed):
                        items_removed = undesirable_dirs_removed + undesirable_files_removed
                        message += (
                            " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) were removed from the archive."
                            % items_removed
                        )
                    if istar and remove_repo_files_not_in_tar and files_to_remove:
                        if upload_point is not None:
                            message += (
                                " %d files were removed from the repository relative to the selected upload point '%s'."
                                % (len(files_to_remove), upload_point)
                            )
                        else:
                            message += " %d files were removed from the repository root." % len(files_to_remove)
                else:
                    message = "No changes to repository."
                # Set metadata on the repository tip.
                error_message, status = set_repository_metadata(
                    trans, repository_id, repository.tip, content_alert_str=content_alert_str, **kwd
                )
                if error_message:
                    # If there is an error, display it.
                    message = "%s<br/>%s" % (message, error_message)
                    return trans.response.send_redirect(
                        web.url_for(
                            controller="repository",
                            action="manage_repository",
                            id=repository_id,
                            message=message,
                            status=status,
                        )
                    )
                else:
                    # If no error occurred in setting metadata on the repository tip, reset metadata on all
                    # changeset revisions for the repository.  This will result in a more standardized set of
                    # valid repository revisions that can be installed.
                    reset_all_repository_metadata(trans, repository_id, **kwd)
                trans.response.send_redirect(
                    web.url_for(
                        controller="repository",
                        action="browse_repository",
                        id=repository_id,
                        commit_message="Deleted selected files",
                        webapp="community",
                        message=message,
                        status=status,
                    )
                )
            else:
                status = "error"
def upload(self, trans, **kwd):
    """Web controller action: upload a file, archive, or cloned hg repository
    into a tool shed repository, commit it, and refresh repository metadata.

    NOTE(review): indentation reconstructed from a collapsed source line —
    confirm against the original file.
    """
    params = util.Params(kwd)
    message = util.restore_text(params.get('message', ''))
    status = params.get('status', 'done')
    commit_message = util.restore_text(
        params.get('commit_message', 'Uploaded'))
    category_ids = util.listify(params.get('category_id', ''))
    categories = suc.get_categories(trans)
    repository_id = params.get('repository_id', '')
    repository = suc.get_repository_in_tool_shed(trans, repository_id)
    repo_dir = repository.repo_path(trans.app)
    repo = hg.repository(suc.get_configured_ui(), repo_dir)
    uncompress_file = util.string_as_bool(
        params.get('uncompress_file', 'true'))
    remove_repo_files_not_in_tar = util.string_as_bool(
        params.get('remove_repo_files_not_in_tar', 'true'))
    uploaded_file = None
    upload_point = self.__get_upload_point(repository, **kwd)
    # Remember the current tip so we can tell later whether anything changed.
    tip = repository.tip(trans.app)
    file_data = params.get('file_data', '')
    url = params.get('url', '')
    # Part of the upload process is sending email notification to those that have registered to
    # receive them.  One scenario occurs when the first change set is produced for the repository.
    # See the suc.handle_email_alerts() method for the definition of the scenarios.
    new_repo_alert = repository.is_new(trans.app)
    uploaded_directory = None
    if params.get('upload_button', False):
        if file_data == '' and url == '':
            message = 'No files were entered on the upload form.'
            status = 'error'
            uploaded_file = None
        elif url and url.startswith('hg'):
            # Use mercurial clone to fetch repository, contents will then be copied over.
            uploaded_directory = tempfile.mkdtemp()
            # 'hg…' URL is rewritten to its 'http…' equivalent for the clone.
            repo_url = 'http%s' % url[len('hg'):]
            repo_url = repo_url.encode('ascii', 'replace')
            commands.clone(suc.get_configured_ui(), repo_url, uploaded_directory)
        elif url:
            # Download the file at the given URL into a temp file.
            valid_url = True
            try:
                stream = urllib.urlopen(url)
            except Exception, e:
                valid_url = False
                message = 'Error uploading file via http: %s' % str(e)
                status = 'error'
                uploaded_file = None
            if valid_url:
                fd, uploaded_file_name = tempfile.mkstemp()
                uploaded_file = open(uploaded_file_name, 'wb')
                while 1:
                    chunk = stream.read(CHUNK_SIZE)
                    if not chunk:
                        break
                    uploaded_file.write(chunk)
                uploaded_file.flush()
                uploaded_file_filename = url.split('/')[-1]
                isempty = os.path.getsize(
                    os.path.abspath(uploaded_file_name)) == 0
        elif file_data not in ('', None):
            # Browser form upload.
            uploaded_file = file_data.file
            uploaded_file_name = uploaded_file.name
            uploaded_file_filename = os.path.split(file_data.filename)[-1]
            isempty = os.path.getsize(
                os.path.abspath(uploaded_file_name)) == 0
        if uploaded_file or uploaded_directory:
            ok = True
            isgzip = False
            isbz2 = False
            if uploaded_file:
                if uncompress_file:
                    isgzip = checkers.is_gzip(uploaded_file_name)
                    if not isgzip:
                        isbz2 = checkers.is_bz2(uploaded_file_name)
                if isempty:
                    tar = None
                    istar = False
                else:
                    # Determine what we have - a single file or an archive
                    try:
                        if (isgzip or isbz2) and uncompress_file:
                            # Open for reading with transparent compression.
                            tar = tarfile.open(uploaded_file_name, 'r:*')
                        else:
                            tar = tarfile.open(uploaded_file_name)
                        istar = True
                    except tarfile.ReadError, e:
                        tar = None
                        istar = False
            else:
                # Uploaded directory
                istar = False
            if istar:
                # Archive: delegate extraction/commit to upload_tar().
                ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                    self.upload_tar(
                        trans,
                        repository,
                        tar,
                        uploaded_file,
                        upload_point,
                        remove_repo_files_not_in_tar,
                        commit_message,
                        new_repo_alert
                    )
            elif uploaded_directory:
                # Cloned repository: delegate copy/commit to upload_directory().
                ok,message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                    self.upload_directory(
                        trans,
                        repository,
                        uploaded_directory,
                        upload_point,
                        remove_repo_files_not_in_tar,
                        commit_message,
                        new_repo_alert
                    )
            else:
                # Single file: uncompress if needed, move into the repo, then
                # hg add + commit it.
                if (isgzip or isbz2) and uncompress_file:
                    uploaded_file_filename = self.uncompress(
                        repository, uploaded_file_name,
                        uploaded_file_filename, isgzip, isbz2)
                if upload_point is not None:
                    full_path = os.path.abspath(
                        os.path.join(repo_dir, upload_point,
                                     uploaded_file_filename))
                else:
                    full_path = os.path.abspath(
                        os.path.join(repo_dir, uploaded_file_filename))
                # Move the uploaded file to the load_point within the repository hierarchy.
                shutil.move(uploaded_file_name, full_path)
                # See if any admin users have chosen to receive email alerts when a repository is
                # updated.  If so, check every uploaded file to ensure content is appropriate.
                check_contents = suc.check_file_contents(trans)
                if check_contents and os.path.isfile(full_path):
                    content_alert_str = self.__check_file_content(full_path)
                else:
                    content_alert_str = ''
                commands.add(repo.ui, repo, full_path)
                # Convert from unicode to prevent "TypeError: array item must be char"
                full_path = full_path.encode('ascii', 'replace')
                commands.commit(repo.ui, repo, full_path,
                                user=trans.user.username,
                                message=commit_message)
                if full_path.endswith('tool_data_table_conf.xml.sample'):
                    # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
                    # to the in-memory trans.app.tool_data_tables dictionary.
                    error, error_message = suc.handle_sample_tool_data_table_conf_file(
                        trans.app, full_path)
                    if error:
                        message = '%s<br/>%s' % (message, error_message)
                # See if the content of the change set was valid.
                admin_only = len(repository.downloadable_revisions) != 1
                suc.handle_email_alerts(
                    trans, repository,
                    content_alert_str=content_alert_str,
                    new_repo_alert=new_repo_alert,
                    admin_only=admin_only)
            if ok:
                # Update the repository files for browsing.
                suc.update_repository(repo)
                # Get the new repository tip.
                if tip == repository.tip(trans.app):
                    message = 'No changes to repository. '
                    status = 'warning'
                else:
                    # Something was committed: build a status message.
                    if (isgzip or isbz2) and uncompress_file:
                        uncompress_str = ' uncompressed and '
                    else:
                        uncompress_str = ' '
                    if uploaded_directory:
                        source_type = "repository"
                        source = url
                    else:
                        source_type = "file"
                        source = uploaded_file_filename
                    message = "The %s '%s' has been successfully%suploaded to the repository. " % (
                        source_type, source, uncompress_str)
                    if istar and (undesirable_dirs_removed or undesirable_files_removed):
                        items_removed = undesirable_dirs_removed + undesirable_files_removed
                        message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) were removed from the archive. " % items_removed
                    if istar and remove_repo_files_not_in_tar and files_to_remove:
                        if upload_point is not None:
                            message += " %d files were removed from the repository relative to the selected upload point '%s'. " % (
                                len(files_to_remove), upload_point)
                        else:
                            message += " %d files were removed from the repository root. " % len(
                                files_to_remove)
                kwd['message'] = message
                suc.set_repository_metadata_due_to_new_tip(
                    trans, repository,
                    content_alert_str=content_alert_str, **kwd)
                # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to a requirement tag mismatch
                # or some other problem.
                if suc.get_config_from_disk('tool_dependencies.xml', repo_dir):
                    if repository.metadata_revisions:
                        # A repository's metadata revisions are order descending by update_time, so the zeroth revision will be the tip just after an upload.
                        metadata_dict = repository.metadata_revisions[
                            0].metadata
                    else:
                        metadata_dict = {}
                    if suc.has_orphan_tool_dependencies_in_tool_shed(
                            metadata_dict):
                        message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml file", '
                        message += 'so one or more of the defined tool dependencies are considered orphans within this repository.'
                        status = 'warning'
                # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
                suc.reset_tool_data_tables(trans.app)
                trans.response.send_redirect(
                    web.url_for(controller='repository',
                                action='browse_repository',
                                id=repository_id,
                                commit_message='Deleted selected files',
                                message=message,
                                status=status))
            else:
                status = 'error'
            # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
            suc.reset_tool_data_tables(trans.app)
# NOTE(review): this chunk is a fragment — the enclosing function (an archive
# upload helper) begins before this view and the final call is truncated
# mid-argument-list; indentation below is reconstructed — confirm against the
# full file.
            os.remove(absolute_selected_file)
            # Prune the containing directory if removing the file emptied it.
            dir = os.path.split(absolute_selected_file)[0]
            try:
                os.rmdir(dir)
            except OSError, e:
                # The directory is not empty.
                pass
        # See if any admin users have chosen to receive email alerts when a repository is updated.  If so, check every uploaded file to ensure
        # content is appropriate.
        check_contents = check_file_contents_for_email_alerts(trans)
        for filename_in_archive in filenames_in_archive:
            # Check file content to ensure it is appropriate.
            if check_contents and os.path.isfile(filename_in_archive):
                content_alert_str += check_file_content_for_html_and_images(
                    filename_in_archive)
            # Schedule every extracted file for addition to the hg repository.
            commands.add(repo.ui, repo, filename_in_archive)
            if filename_in_archive.endswith('tool_data_table_conf.xml.sample'):
                # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
                # to the in-memory trans.app.tool_data_tables dictionary.
                error, message = tool_util.handle_sample_tool_data_table_conf_file(
                    trans.app, filename_in_archive)
                if error:
                    # Abort early: report the parse error to the caller.
                    return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
        # Commit everything that was added above in a single changeset.
        commands.commit(repo.ui, repo, full_path,
                        user=trans.user.username, message=commit_message)
        # admin_only is False only for the very first downloadable revision.
        admin_only = len(repository.downloadable_revisions) != 1
        suc.handle_email_alerts(trans, repository,
def iadd(ui, repo, id=None, comment=0, **opts):
    """Adds a new issue, or comment to an existing issue ID or its comment COMMENT"""
    comment = int(comment)

    # First, make sure issues have a directory
    issues_dir = ui.config('artemis', 'issues', default=default_issues_dir)
    issues_path = os.path.join(repo.root, issues_dir)
    if not os.path.exists(issues_path):
        os.mkdir(issues_path)

    # Commenting on an existing issue: resolve it and open its Maildir.
    if id:
        issue_fn, issue_id = _find_issue(ui, repo, id)
        if not issue_fn:
            ui.warn('No such issue\n')
            return
        _create_missing_dirs(issues_path, issue_id)
        mbox = mailbox.Maildir(issue_fn, factory=mailbox.MaildirMessage)
        keys = _order_keys_date(mbox)
        root = keys[0]
    user = ui.username()

    # Template text shown in the editor when no -m message was given.
    default_issue_text = "From: %s\nDate: %s\n" % (user, datestr(format=date_format))
    if not id:
        default_issue_text += "State: %s\n" % default_state
        default_issue_text += "Subject: brief description\n\n"
    else:
        # Reply: reuse (and 'Re: '-prefix) the subject of the target comment.
        subject = mbox[(comment < len(mbox) and keys[comment]) or root]['Subject']
        if not subject.startswith('Re: '):
            subject = 'Re: ' + subject
        default_issue_text += "Subject: %s\n\n" % subject
    default_issue_text += "Detailed description."

    # Get properties, and figure out if we need an explicit comment
    properties = _get_properties(opts['property'])
    no_comment = id and properties and opts['no_property_comment']
    message = opts['message']

    # Create the text
    if message:
        if not id:
            state_str = 'State: %s\n' % default_state
        else:
            state_str = ''
        issue = "From: %s\nDate: %s\nSubject: %s\n%s" % \
            (user, datestr(format=date_format), message, state_str)
    elif not no_comment:
        # Open the user's editor; bail out on empty or unchanged text.
        issue = ui.edit(default_issue_text, user)
        if issue.strip() == '':
            ui.warn('Empty issue, ignoring\n')
            return
        if issue.strip() == default_issue_text:
            ui.warn('Unchanged issue text, ignoring\n')
            return
    else:
        # Write down a comment about updated properties
        properties_subject = ', '.join(
            ['%s=%s' % (property, value) for (property, value) in properties])
        issue = "From: %s\nDate: %s\nSubject: changed properties (%s)\n" % \
            (user, datestr(format = date_format), properties_subject)

    # Create the message
    msg = mailbox.MaildirMessage(issue)
    if opts['attach']:
        outer = _attach_files(msg, opts['attach'])
    else:
        outer = msg

    # Pick random filename
    if not id:
        issue_fn = issues_path
        # Loop until an unused random id (and therefore path) is found.
        while os.path.exists(issue_fn):
            issue_id = _random_id()
            issue_fn = os.path.join(issues_path, issue_id)
        mbox = mailbox.Maildir(issue_fn, factory=mailbox.MaildirMessage)
        keys = _order_keys_date(mbox)
    # else: issue_fn already set

    # Add message to the mailbox
    mbox.lock()
    if id and comment >= len(mbox):
        ui.warn(
            'No such comment number in mailbox, commenting on the issue itself\n'
        )
    if not id:
        # Root message of a brand-new issue.
        outer.add_header(
            'Message-Id', "<%s-0-artemis@%s>" % (issue_id, socket.gethostname()))
    else:
        # Reply: thread it onto the targeted comment (or the root message).
        root = keys[0]
        outer.add_header(
            'Message-Id', "<%s-%s-artemis@%s>" % (issue_id, _random_id(), socket.gethostname()))
        outer.add_header(
            'References', mbox[(comment < len(mbox) and keys[comment]) or root]['Message-Id'])
        outer.add_header(
            'In-Reply-To', mbox[(comment < len(mbox) and keys[comment]) or root]['Message-Id'])
    new_bug_path = issue_fn + '/new/' + mbox.add(outer)
    commands.add(ui, repo, new_bug_path)

    # Fix properties in the root message
    if properties:
        root = _find_root_key(mbox)
        msg = mbox[root]
        for property, value in properties:
            if property in msg:
                msg.replace_header(property, value)
            else:
                msg.add_header(property, value)
        mbox[root] = msg
    mbox.close()

    if opts['commit']:
        commands.commit(ui, repo, issue_fn)

    # If adding issue, add the new mailbox to the repository
    if not id:
        ui.status('Added new issue %s\n' % issue_id)
    else:
        _show_mbox(ui, mbox, 0)
def addlf(parent, ui, repo, files):
    """Schedule *files* for addition as largefiles; always reports success."""
    # Same call as the original, with the largefiles options gathered into an
    # explicit option dict instead of inline keyword arguments.
    opts = {'lfsize': '', 'normal': None, 'large': True}
    commands.add(ui, repo, *files, **opts)
    return True
def add_file(self, filepath):
    """Create *filepath* with placeholder content and hg-add it to self.repo.

    Fix: the original called ``f.writelines("Don't Care")``, which iterates a
    string character by character; ``write`` is the correct call for a single
    string (same bytes on disk, clearer intent).
    """
    with open(filepath, 'w') as f:
        f.write("Don't Care")
    commands.add(ui.ui(), self.repo, filepath)