def paged_diffs(self, commit_id, start=0, end=None, onlyChangedFiles=False):
    result = {'added': [], 'removed': [], 'changed': [], 'copied': [], 'renamed': []}
    cmd_args = ['--no-commit-id',
                '--name-status',
                '--no-abbrev',
                '--root',
                # show tree entry itself as well as subtrees (Commit.added_paths relies on this)
                '-t',
                '-z',  # don't escape filenames and use \x00 as fields delimiter
                ]
    if onlyChangedFiles:
        cmd_args[4] = '-r'
    if asbool(tg.config.get('scm.commit.git.detect_copies', True)):
        cmd_args += ['-M', '-C']

    cmd_output = self._git.git.diff_tree(commit_id, *cmd_args).split('\x00')[:-1]

    '''
    cmd_output will be like:
    [
        'A', 'filename',
        'D', 'another filename',
        'M', 'po',
        'R100',  # <-- These next three lines would only show up with 'detect_copies' enabled
        'po/sr.po',
        'po/sr_Latn.po',
    ]
    '''

    x = 0
    files = []
    while x < len(cmd_output):
        status = cmd_output[x][0]
        if status in ('R', 'C'):
            ratio = float(cmd_output[x][1:4]) / 100.0
            files.append((status, {
                'new': h.really_unicode(cmd_output[x + 2]),
                'old': h.really_unicode(cmd_output[x + 1]),
                'ratio': ratio,
            }))
            x += 3
        else:
            files.append((status, h.really_unicode(cmd_output[x + 1])))
            x += 2

    for status, name in files[start:end]:
        change_list = {
            'R': result['renamed'],
            'C': result['copied'],
            'A': result['added'],
            'D': result['removed'],
            'M': result['changed'],
        }[status]
        change_list.append(name)

    result['total'] = len(files)
    return result

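# A hypothetical usage sketch of paged_diffs above: page through the files
# touched by one commit, 25 at a time.  `repo` and the commit id here are
# assumptions for illustration, not part of the code above.
diffs = repo.paged_diffs('deadbeefcafe', start=0, end=25)
for path in diffs['added']:
    print('A %s' % path)
for rename in diffs['renamed']:
    print('R {old} -> {new} ({ratio:.0%})'.format(**rename))  # ratio is 0.0-1.0
print('%s files total' % diffs['total'])
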
def create_forum(app, new_forum):
    if 'parent' in new_forum and new_forum['parent']:
        parent_id = ObjectId(str(new_forum['parent']))
        shortname = (DM.Forum.query.get(_id=parent_id).shortname + '/'
                     + new_forum['shortname'])
    else:
        parent_id = None
        shortname = new_forum['shortname']
    description = new_forum.get('description', '')
    f = DM.Forum(app_config_id=app.config._id,
                 parent_id=parent_id,
                 name=h.really_unicode(new_forum['name']),
                 shortname=h.really_unicode(shortname),
                 description=h.really_unicode(description),
                 members_only=new_forum.get('members_only', False),
                 anon_posts=new_forum.get('anon_posts', False),
                 monitoring_email=new_forum.get('monitoring_email', None),
                 )
    if f.members_only and f.anon_posts:
        flash('You cannot have anonymous posts in a members only forum.', 'warning')
        f.anon_posts = False
    if f.members_only:
        role_developer = ProjectRole.by_name('Developer')._id
        f.acl = [
            ACE.allow(role_developer, ALL_PERMISSIONS),
            DENY_ALL]
    elif f.anon_posts:
        role_anon = ProjectRole.anonymous()._id
        f.acl = [ACE.allow(role_anon, 'post')]
    else:
        f.acl = []
    if 'icon' in new_forum and new_forum['icon'] is not None and new_forum['icon'] != '':
        save_forum_icon(f, new_forum['icon'])
    return f

def create_forum(app, new_forum):
    if 'parent' in new_forum and new_forum['parent']:
        parent_id = ObjectId(str(new_forum['parent']))
        shortname = (DM.Forum.query.get(_id=parent_id).shortname + '/'
                     + new_forum['shortname'])
    else:
        parent_id = None
        shortname = new_forum['shortname']
    description = new_forum.get('description', '')
    f = DM.Forum(
        app_config_id=app.config._id,
        parent_id=parent_id,
        name=h.really_unicode(new_forum['name']),
        shortname=h.really_unicode(shortname),
        description=h.really_unicode(description),
        members_only=new_forum.get('members_only', False),
        anon_posts=new_forum.get('anon_posts', False),
        monitoring_email=new_forum.get('monitoring_email', None),
    )
    if f.members_only and f.anon_posts:
        flash('You cannot have anonymous posts in a members only forum.', 'warning')
        f.anon_posts = False
    if f.members_only:
        role_developer = ProjectRole.by_name('Developer')._id
        f.acl = [ACE.allow(role_developer, ALL_PERMISSIONS), DENY_ALL]
    elif f.anon_posts:
        role_anon = ProjectRole.anonymous()._id
        f.acl = [ACE.allow(role_anon, 'post')]
    else:
        f.acl = []
    return f

def set_home(self, new_home):
    self.app.root_page_name = new_home
    self.app.upsert_root(new_home)
    flash('Home updated')
    mount_base = c.project.url() + self.app.config.options.mount_point + '/'
    url = (h.really_unicode(mount_base).encode('utf-8')
           + h.really_unicode(new_home).encode('utf-8') + '/')
    redirect(url)

def diff(self, commit, fmt=None):
    try:
        path, filename = os.path.split(self._blob.path())
        a_ci = c.app.repo.commit(commit)
        a = a_ci.get_path(self._blob.path())
        apath = a.path()
    except Exception:  # the previous commit doesn't have the file
        a = []
        apath = ''
    b = self._blob

    if not self._blob.has_html_view:
        diff = "Cannot display: file marked as a binary type."
        return dict(a=a, b=b, diff=diff)

    la = list(a)
    lb = list(b)
    adesc = (u'a' + h.really_unicode(apath)).encode('utf-8')
    bdesc = (u'b' + h.really_unicode(b.path())).encode('utf-8')

    if not fmt:
        fmt = web_session.get('diformat', '')
    else:
        web_session['diformat'] = fmt
        web_session.save()
    if fmt == 'sidebyside':
        hd = HtmlSideBySideDiff()
        diff = hd.make_table(la, lb, adesc, bdesc)
    else:
        diff = ''.join(difflib.unified_diff(la, lb, adesc, bdesc))
    return dict(a=a, b=b, diff=diff)

def diff(self, commit, fmt=None, **kw):
    try:
        path, filename = os.path.split(self._blob.path())
        a_ci = c.app.repo.commit(commit)
        a = a_ci.get_path(self._blob.path())
        apath = a.path()
    except Exception:  # the previous commit doesn't have the file
        a = []
        apath = ''
    b = self._blob

    if not self._blob.has_html_view:
        diff = "Cannot display: file marked as a binary type."
        return dict(a=a, b=b, diff=diff)

    la = list(a)
    lb = list(b)
    adesc = (u'a' + h.really_unicode(apath)).encode('utf-8')
    bdesc = (u'b' + h.really_unicode(b.path())).encode('utf-8')

    if not fmt:
        fmt = web_session.get('diformat', '')
    else:
        web_session['diformat'] = fmt
        web_session.save()
    if fmt == 'sidebyside':
        hd = HtmlSideBySideDiff()
        diff = hd.make_table(la, lb, adesc, bdesc)
    else:
        diff = ''.join(difflib.unified_diff(la, lb, adesc, bdesc))
    return dict(a=a, b=b, diff=diff)

def refresh_commit_info(self, oid, seen, lazy=True):
    from allura.model.repository import CommitDoc
    ci_doc = CommitDoc.m.get(_id=oid)
    if ci_doc and lazy:
        return False
    ci = self._git.rev_parse(oid)
    args = dict(
        tree_id=ci.tree.hexsha,
        committed=Object(
            name=h.really_unicode(ci.committer.name),
            email=h.really_unicode(ci.committer.email),
            date=datetime.utcfromtimestamp(ci.committed_date)),
        authored=Object(
            name=h.really_unicode(ci.author.name),
            email=h.really_unicode(ci.author.email),
            date=datetime.utcfromtimestamp(ci.authored_date)),
        message=h.really_unicode(ci.message or ''),
        child_ids=[],
        parent_ids=[p.hexsha for p in ci.parents])
    if ci_doc:
        ci_doc.update(**args)
        ci_doc.m.save()
    else:
        ci_doc = CommitDoc(dict(args, _id=ci.hexsha))
        try:
            ci_doc.m.insert(safe=True)
        except DuplicateKeyError:
            if lazy:
                return False
    self.refresh_tree_info(ci.tree, seen, lazy)
    return True

def _make_page(self, text, filename, commit, renamed_to=None):
    orig_name = self._format_supported(filename)
    renamed_orig_name = self._format_supported(renamed_to) if renamed_to else None
    if not orig_name:
        return
    if renamed_to and not renamed_orig_name:
        return
    mod_date = datetime.utcfromtimestamp(commit.committed_date)
    wiki_page = WM.Page.upsert(self._convert_page_name(orig_name))
    wiki_page.timestamp = wiki_page.mod_date = mod_date
    if renamed_orig_name and renamed_to in commit.tree:
        wiki_page.title = self._convert_page_name(renamed_orig_name)
        wiki_page.text = self.convert_markup(h.really_unicode(text), renamed_to)
    elif filename in commit.tree:
        wiki_page.text = self.convert_markup(h.really_unicode(text), filename)
    else:
        wiki_page.delete()
    import_id_name = renamed_orig_name if renamed_orig_name else orig_name
    wiki_page.import_id = ImportIdConverter.get().expand(import_id_name, self.app)
    wiki_page.commit()
    return wiki_page

def _computeLines(newblob, oldblob=None):
    if oldblob:
        listold = list(oldblob)
    else:
        listold = []
    if newblob:
        listnew = list(newblob)
    else:
        listnew = []

    if oldblob is None:
        lines = len(listnew)
    elif newblob and newblob.has_html_view:
        # py2 unified_diff can handle some unicode but not consistently,
        # so best to do ensure_str (can drop it on py3)
        diff = difflib.unified_diff(
            [six.ensure_str(h.really_unicode(line)) for line in listold],
            [six.ensure_str(h.really_unicode(line)) for line in listnew],
            six.ensure_str('old' + oldblob.path()),
            six.ensure_str('new' + newblob.path()))
        lines = len([l for l in diff if len(l) > 0 and l[0] == '+']) - 1
    else:
        lines = 0
    return lines

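# Self-contained illustration of the line-counting trick in _computeLines
# above: added lines in a unified diff start with '+', but the '+++' file
# header also matches, hence the trailing "- 1".
import difflib

_old = ['a\n', 'b\n']
_new = ['a\n', 'b\n', 'c\n', 'd\n']
_diff = list(difflib.unified_diff(_old, _new, 'old/f.txt', 'new/f.txt'))
assert len([l for l in _diff if len(l) > 0 and l[0] == '+']) - 1 == 2
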
def register(self, project_unixname=None, project_description=None, project_name=None,
             neighborhood=None, private_project=None, tools=None, **kw):
    require_access(self.neighborhood, 'register')
    if private_project:
        require_access(self.neighborhood, 'admin')
    neighborhood = M.Neighborhood.query.get(name=neighborhood)

    project_description = h.really_unicode(project_description or '').encode('utf-8')
    project_name = h.really_unicode(project_name or '').encode('utf-8')
    project_unixname = h.really_unicode(project_unixname or '').encode('utf-8').lower()
    try:
        c.project = neighborhood.register_project(
            project_unixname, project_name=project_name, private_project=private_project)
    except exceptions.ProjectOverlimitError:
        flash("You have exceeded the maximum number of projects you are allowed to create",
              'error')
        redirect('add_project')
    except exceptions.ProjectRatelimitError:
        flash("Project creation rate limit exceeded. Please try again later.", 'error')
        redirect('add_project')

    if project_description:
        c.project.short_description = project_description
    offset = c.project.next_mount_point(include_hidden=True)
    if tools and not neighborhood.project_template:
        for i, tool in enumerate(tools):
            c.project.install_app(tool, ordinal=i + offset)
    flash('Welcome to the SourceForge Project System! '
          'To get started, fill out some information about your project.')
    redirect(c.project.script_name + 'admin/overview')

def refresh_commit_info(self, oid, seen, lazy=True):
    from allura.model.repo import CommitDoc
    ci_doc = CommitDoc.m.get(_id=oid)
    if ci_doc and lazy:
        return False
    obj = self._hg[oid]
    # Save commit metadata
    mo = self.re_hg_user.match(obj.user())
    if mo:
        user_name, user_email = mo.groups()
    else:
        user_name = user_email = obj.user()
    user = Object(
        name=h.really_unicode(user_name),
        email=h.really_unicode(user_email),
        date=datetime.utcfromtimestamp(sum(obj.date())))
    fake_tree = self._tree_from_changectx(obj)
    args = dict(
        tree_id=fake_tree.hex(),
        committed=user,
        authored=user,
        message=h.really_unicode(obj.description() or ''),
        child_ids=[],
        parent_ids=[p.hex() for p in self.real_parents(obj)
                    if p.hex() != obj.hex()])
    if ci_doc:
        ci_doc.update(args)
        ci_doc.m.save()
    else:
        ci_doc = CommitDoc(dict(args, _id=oid))
        try:
            ci_doc.m.insert(safe=True)
        except DuplicateKeyError:
            if lazy:
                return False
    self.refresh_tree_info(fake_tree, seen, lazy)
    return True

def _lookup(self, next, *rest):
    if not rest and request.response_ext:
        # A directory name may end with a file extension (e.g. `dir.rdf`);
        # the dispatching system will cut the extension, so we need to restore it
        next = "%s%s" % (next, request.response_ext)
        request.response_ext = None
        request.response_type = None
    next = h.really_unicode(unquote(next))
    if not rest:
        # Might be a file rather than a dir
        filename = h.really_unicode(
            unquote(request.environ['PATH_INFO'].rsplit('/')[-1]))
        if filename:
            try:
                obj = self._tree[filename]
            except KeyError:
                raise exc.HTTPNotFound()
            if isinstance(obj, M.repo.Blob):
                return self.FileBrowserClass(self._commit, self._tree, filename), rest
    elif rest == ('index',):
        rest = (request.environ['PATH_INFO'].rsplit('/')[-1],)
    tree = self._tree[next]
    if tree is None:
        raise exc.HTTPNotFound()
    return self.__class__(self._commit, tree, self._path + '/' + next, self), rest

def _map_log(self, ci, url, path=None):
    revno = ci.revision.number
    rev = pysvn.Revision(pysvn.opt_revision_kind.number, revno)
    try:
        size = int(self._svn.list(url, revision=rev, peg_revision=rev)[0][0].size)
    except pysvn.ClientError:
        size = None
    rename_details = {}
    changed_paths = ci.get("changed_paths", [])
    for changed_path in changed_paths:
        changed_path = self._check_changed_path(changed_path, path)
        if (changed_path["copyfrom_path"]
                and changed_path["path"] == path
                and changed_path["action"] == "A"):
            rename_details["path"] = changed_path["copyfrom_path"]
            rename_details["commit_url"] = self._repo.url_for_commit(
                changed_path["copyfrom_revision"].number)
            break
    return {
        "id": revno,
        "message": h.really_unicode(ci.get("message", "--none--")),
        "authored": {
            "name": h.really_unicode(ci.get("author", "--none--")),
            "email": "",
            "date": datetime.utcfromtimestamp(ci.date),
        },
        "committed": {
            "name": h.really_unicode(ci.get("author", "--none--")),
            "email": "",
            "date": datetime.utcfromtimestamp(ci.date),
        },
        "refs": ["HEAD"] if revno == self.head else [],
        "parents": [revno - 1] if revno > 1 else [],
        "size": size,
        "rename_details": rename_details,
    }

def register(self, project_unixname=None, project_description=None, project_name=None,
             neighborhood=None, private_project=None, tools=None, **kw):
    require_access(self.neighborhood, 'register')
    if private_project:
        require_access(self.neighborhood, 'admin')
    neighborhood = M.Neighborhood.query.get(name=neighborhood)

    project_description = h.really_unicode(project_description or '').encode('utf-8')
    project_name = h.really_unicode(project_name or '').encode('utf-8')
    project_unixname = h.really_unicode(project_unixname or '').encode('utf-8').lower()
    try:
        c.project = neighborhood.register_project(
            project_unixname, project_name=project_name, private_project=private_project)
    except exceptions.ProjectOverlimitError:
        flash("You have exceeded the maximum number of projects you are allowed to create",
              'error')
        redirect('add_project')
    except exceptions.ProjectRatelimitError:
        flash("Project creation rate limit exceeded. Please try again later.", 'error')
        redirect('add_project')
    except Exception:
        log.error('error registering project: %s', project_unixname, exc_info=True)
        flash('Internal Error. Please try again later.', 'error')
        redirect('add_project')

    if project_description:
        c.project.short_description = project_description
    offset = c.project.next_mount_point(include_hidden=True)
    if tools and not neighborhood.project_template:
        anchored_tools = neighborhood.get_anchored_tools()
        for i, tool in enumerate(tools):
            if (tool.lower() not in anchored_tools.keys()
                    and c.project.app_instance(tool) is None):
                c.project.install_app(tool, ordinal=i + offset)
    flash('Welcome to the %s Project System! '
          'To get started, fill out some information about your project.'
          % config['site_name'])
    redirect(c.project.script_name + 'admin/overview')

def _lookup(self, next, *rest):
    next = h.really_unicode(unquote(next))
    if not rest:
        # Might be a file rather than a dir
        filename = h.really_unicode(
            unquote(request.environ['PATH_INFO'].rsplit('/')[-1]))
        if filename:
            try:
                obj = self._tree[filename]
            except KeyError:
                raise exc.HTTPNotFound()
            if isinstance(obj, M.repository.Blob):
                return self.FileBrowserClass(self._commit, self._tree, filename), rest
    elif rest == ('index',):
        rest = (request.environ['PATH_INFO'].rsplit('/')[-1],)
    try:
        tree = self._tree[next]
    except KeyError:
        raise exc.HTTPNotFound()
    return self.__class__(self._commit, tree, self._path + '/' + next, self), rest

def refresh_commit_info(self, oid, seen, lazy=True):
    from allura.model.repo import CommitDoc
    ci_doc = CommitDoc.m.get(_id=oid)
    if ci_doc and lazy:
        return False
    obj = self._hg[oid]
    # Save commit metadata
    mo = self.re_hg_user.match(obj.user())
    if mo:
        user_name, user_email = mo.groups()
    else:
        user_name = user_email = obj.user()
    user = Object(
        name=h.really_unicode(user_name),
        email=h.really_unicode(user_email),
        date=datetime.utcfromtimestamp(obj.date()[0]))
    fake_tree = self._tree_from_changectx(obj)
    args = dict(
        tree_id=fake_tree.hex(),
        committed=user,
        authored=user,
        message=h.really_unicode(obj.description() or ''),
        child_ids=[],
        parent_ids=[p.hex() for p in self.real_parents(obj)
                    if p.hex() != obj.hex()])
    if ci_doc:
        ci_doc.update(args)
        ci_doc.m.save()
    else:
        ci_doc = CommitDoc(dict(args, _id=oid))
        try:
            ci_doc.m.insert(safe=True)
        except DuplicateKeyError:
            if lazy:
                return False
    self.refresh_tree_info(fake_tree, seen, lazy)
    return True

def paged_diffs(self, commit_id, start=0, end=None, onlyChangedFiles=False):
    result = {
        'added': [],
        'removed': [],
        'changed': [],
        'copied': [],
        'renamed': [],
        'total': 0,
    }
    rev = self._revision(commit_id)
    try:
        log_info = self._svn.log(self._url,
                                 revision_start=rev,
                                 revision_end=rev,
                                 discover_changed_paths=True)
    except pysvn.ClientError:
        log.info('Error getting paged_diffs log of %s on %s',
                 commit_id, self._url, exc_info=True)
        return result
    if len(log_info) == 0:
        return result
    paths = sorted(log_info[0].changed_paths, key=op.itemgetter('path'))
    result['total'] = len(paths)
    for p in paths[start:end]:
        if p['copyfrom_path'] is not None:
            result['copied'].append({
                'new': h.really_unicode(p.path),
                'old': h.really_unicode(p.copyfrom_path),
                'ratio': 1,
            })
        elif p['action'] == 'A':
            result['added'].append(h.really_unicode(p.path))
        elif p['action'] == 'D':
            result['removed'].append(h.really_unicode(p.path))
        elif p['action'] in ['M', 'R']:
            # 'R' means 'Replaced', i.e.
            #   svn rm aaa.txt
            #   echo "Completely new aaa!" > aaa.txt
            #   svn add aaa.txt
            #   svn commit -m "Replace aaa.txt"
            result['changed'].append(h.really_unicode(p.path))

    for r in result['copied'][:]:
        if r['old'] in result['removed']:
            result['removed'].remove(r['old'])
            result['copied'].remove(r)
            result['renamed'].append(r)
        if r['new'] in result['added']:
            result['added'].remove(r['new'])

    return result

def process(self, project_name=None, project_shortname=None, tools=None, **kw):
    project_name = h.really_unicode(project_name).encode('utf-8')
    project_shortname = h.really_unicode(project_shortname).encode('utf-8').lower()
    try:
        c.project = self.neighborhood.register_project(
            project_shortname, project_name=project_name)
    except exceptions.ProjectOverlimitError:
        flash("You have exceeded the maximum number of projects you are allowed to create",
              'error')
        redirect('.')
    except exceptions.ProjectRatelimitError:
        flash("Project creation rate limit exceeded. Please try again later.", 'error')
        redirect('.')
    except Exception:
        log.error('error registering project: %s', project_shortname, exc_info=True)
        flash('Internal Error. Please try again later.', 'error')
        redirect('.')
    c.project.set_tool_data('google-code', project_name=project_name)
    tasks.import_project_info.post()
    for importer_name in tools:
        tasks.import_tool.post(importer_name)
    flash('Welcome to the %s Project System! '
          'Your project data will be imported and should show up here shortly.'
          % config['site_name'])
    redirect(c.project.script_name + 'admin/overview')

def _make_page(self, text, filename, commit, renamed_to=None):
    orig_name = self._format_supported(filename)
    renamed_orig_name = self._format_supported(renamed_to) if renamed_to else None
    if not orig_name:
        return
    if renamed_to and not renamed_orig_name:
        return
    mod_date = datetime.utcfromtimestamp(commit.committed_date)
    wiki_page = WM.Page.upsert(self._convert_page_name(orig_name))
    wiki_page.timestamp = wiki_page.mod_date = mod_date
    wiki_page.viewable_by = ['all']
    if renamed_orig_name and renamed_to in commit.tree:
        wiki_page.title = self._convert_page_name(renamed_orig_name)
        wiki_page.text = self.convert_markup(h.really_unicode(text), renamed_to)
    elif filename in commit.tree:
        wiki_page.text = self.convert_markup(h.really_unicode(text), filename)
    else:
        wiki_page.delete()
    import_id_name = renamed_orig_name if renamed_orig_name else orig_name
    wiki_page.import_id = ImportIdConverter.get().expand(import_id_name, self.app)
    wiki_page.commit()
    return wiki_page

def register(self, project_unixname=None, project_description=None, project_name=None,
             neighborhood=None, private_project=None, tools=None, **kw):
    require_access(self.neighborhood, 'register')
    if private_project:
        require_access(self.neighborhood, 'admin')
    neighborhood = M.Neighborhood.query.get(name=neighborhood)

    project_description = h.really_unicode(project_description or '').encode('utf-8')
    project_name = h.really_unicode(project_name or '').encode('utf-8')
    project_unixname = h.really_unicode(project_unixname or '').encode('utf-8').lower()
    try:
        c.project = neighborhood.register_project(
            project_unixname,
            project_name=project_name,
            private_project=private_project)
    except exceptions.ProjectOverlimitError:
        flash("You have exceeded the maximum number of projects you are allowed to create",
              'error')
        redirect('add_project')
    except exceptions.ProjectRatelimitError:
        flash("Project creation rate limit exceeded. Please try again later.", 'error')
        redirect('add_project')
    except exceptions.ProjectPhoneVerificationError:
        flash('You must pass phone verification', 'error')
        redirect('add_project')
    except Exception:
        log.error('error registering project: %s', project_unixname, exc_info=True)
        flash('Internal Error. Please try again later.', 'error')
        redirect('add_project')

    if project_description:
        c.project.short_description = project_description
    offset = c.project.next_mount_point(include_hidden=True)
    if tools and not neighborhood.project_template:
        anchored_tools = neighborhood.get_anchored_tools()
        install_params = []
        for i, tool in enumerate(tools):
            if (tool.lower() not in anchored_tools.keys()
                    and c.project.app_instance(tool) is None):
                install_params.append(dict(ep_name=tool, ordinal=i + offset))
        c.project.install_apps(install_params)
    flash('Welcome to the %s Project System! '
          'To get started, fill out some information about your project.'
          % config['site_name'])
    redirect(c.project.script_name + 'admin/overview')

def make_comment(self, thread, comment_dict):
    ts = self.parse_date(comment_dict['date'])
    author_id = self.get_user_id(comment_dict['submitter'])
    text = h.really_unicode(self.comment_processing(comment_dict['comment']))
    if not author_id and comment_dict['submitter']:
        text = u'*Originally posted by:* {0}\n\n{1}'.format(
            h.really_unicode(comment_dict['submitter']), text)
    comment = thread.post(text=text, timestamp=ts)
    comment.author_id = author_id

def make_comment(self, thread, comment_dict):
    ts = self.parse_date(comment_dict['date'])
    author_id = self.get_user_id(comment_dict['submitter'])
    text = h.really_unicode(self.comment_processing(comment_dict['comment']))
    if not author_id and comment_dict['submitter']:
        text = '*Originally posted by:* {0}\n\n{1}'.format(
            h.really_unicode(comment_dict['submitter']), text)
    comment = thread.post(text=text, timestamp=ts)
    comment.author_id = author_id

def test_really_unicode():
    here_dir = path.dirname(__file__)
    s = h.really_unicode('\xef\xbb\xbf<?xml version="1.0" encoding="utf-8" ?>')
    assert s.startswith(u'\ufeff')
    s = h.really_unicode(open(path.join(here_dir, 'data/unicode_test.txt')).read())
    assert isinstance(s, unicode)
    # try a non-ascii string in a legacy 8bit encoding
    h.really_unicode(u'\u0410\u0401'.encode('cp1251'))
    # ensure invalid encodings are handled gracefully
    s = h._attempt_encodings('foo', ['LKDJFLDK'])
    assert isinstance(s, unicode)

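# A minimal sketch of the "attempt several encodings" idea that
# h.really_unicode / h._attempt_encodings exercise in the test above.
# The helper name and fallback list here are assumptions for illustration.
def attempt_encodings(s, encodings=('utf-8', 'cp1251', 'latin-1')):
    # s: a byte string of unknown encoding
    for enc in encodings:
        try:
            return s.decode(enc)
        except (UnicodeDecodeError, LookupError):
            continue
    # last resort: decode with replacement characters rather than raising
    return s.decode('utf-8', errors='replace')
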
def log(self, revs=None, path=None, exclude=None, id_only=True, **kw):
    """
    Returns a generator that returns information about commits reachable
    by revs.

    revs can be None or a list or tuple of revisions, each of which
    can be anything parsable by self.commit().  If revs is None, the
    default branch head will be used.

    If path is not None, only commits which modify files under path
    will be included.

    Exclude can be None or a list or tuple of identifiers, each of which
    can be anything parsable by self.commit().  If not None, then any
    revisions reachable by any of the revisions in exclude will not be
    included.

    If id_only is True, returns only the commit ID, otherwise it returns
    detailed information about each commit.
    """
    path = path.strip('/') if path else None
    if exclude is not None:
        revs.extend(['^%s' % e for e in exclude])
    for ci, refs in self._iter_commits_with_refs(revs, '--', path):
        if id_only:
            yield ci.hexsha
        else:
            size = None
            if path:
                try:
                    node = ci.tree / path
                    size = node.size if node.type == 'blob' else None
                except KeyError:
                    size = None
            yield {
                'id': ci.hexsha,
                'message': h.really_unicode(ci.message or '--none--'),
                'authored': {
                    'name': h.really_unicode(ci.author.name or '--none--'),
                    'email': h.really_unicode(ci.author.email),
                    'date': datetime.utcfromtimestamp(ci.authored_date),
                },
                'committed': {
                    'name': h.really_unicode(ci.committer.name or '--none--'),
                    'email': h.really_unicode(ci.committer.email),
                    'date': datetime.utcfromtimestamp(ci.committed_date),
                },
                'refs': refs,
                'parents': [pci.hexsha for pci in ci.parents],
                'size': size,
            }

def refresh_tree_info(self, tree, seen, lazy=True):
    from allura.model.repo import TreeDoc
    if lazy and tree.hex() in seen:
        return
    seen.add(tree.hex())
    doc = TreeDoc(dict(
        _id=tree.hex(),
        tree_ids=[],
        blob_ids=[],
        other_ids=[]))
    for name, t in tree.trees.iteritems():
        self.refresh_tree_info(t, seen, lazy)
        doc.tree_ids.append(dict(name=h.really_unicode(name), id=t.hex()))
    for name, oid in tree.blobs.iteritems():
        doc.blob_ids.append(dict(name=h.really_unicode(name), id=oid))
    doc.m.save(safe=False)
    return doc

def diff(self, prev_commit, fmt=None, prev_file=None, **kw):
    '''
    :param prev_commit: previous commit to compare against
    :param fmt: "sidebyside", or anything else for "unified"
    :param prev_file: previous filename, if different
    :return:
    '''
    try:
        path, filename = os.path.split(self._blob.path())
        a_ci = c.app.repo.commit(prev_commit)
        a = a_ci.get_path(prev_file or self._blob.path())
        apath = a.path()
    except Exception:  # prev commit doesn't have the file
        a = M.repository.EmptyBlob()
        apath = ''
    b = self._blob

    if not self._blob.has_html_view:
        diff = "Cannot display: file marked as a binary type."
        return dict(a=a, b=b, diff=diff)

    # could consider making Blob.__iter__ do unicode conversion?
    # py2 unified_diff can handle some unicode but not consistently, so best
    # to do ensure_str (can drop it on py3)
    la = [six.ensure_str(h.really_unicode(line)) for line in a]
    lb = [six.ensure_str(h.really_unicode(line)) for line in b]
    adesc = 'a' + h.really_unicode(apath)
    bdesc = 'b' + h.really_unicode(b.path())

    if not fmt:
        fmt = web_session.get('diformat', '')
    else:
        web_session['diformat'] = fmt
        web_session.save()
    if fmt == 'sidebyside':
        if max(a.size, b.size) > asint(tg.config.get('scm.view.max_syntax_highlight_bytes', 500000)):
            # have to check the original file size, not diff size, because
            # difflib._mdiff inside HtmlSideBySideDiff can take an extremely
            # long time on large files (and it's even a generator)
            diff = '<em>File too large for side-by-side view</em>'
        else:
            hd = HtmlSideBySideDiff()
            diff = hd.make_table(la, lb, adesc, bdesc)
    else:
        # py2 unified_diff can handle some unicode but not consistently, so
        # best to do str() and ensure_str() (can drop it on py3)
        diff = str('').join(difflib.unified_diff(
            la, lb, six.ensure_str(adesc), six.ensure_str(bdesc)))
    return dict(a=a, b=b, diff=diff)

def convert(self, source):
    if len(source) > asint(config.get('markdown_render_max_length', 40000)):
        # if the text is too big, markdown can take a long time to process
        # it, so we return it as plain text
        log.info('Text is too big. Skipping markdown processing')
        escaped = cgi.escape(h.really_unicode(source))
        return h.html.literal(u'<pre>%s</pre>' % escaped)
    try:
        return markdown.Markdown.convert(self, source)
    except Exception:
        log.info('Invalid markdown: %s', source, exc_info=True)
        escaped = h.really_unicode(source)
        escaped = cgi.escape(escaped)
        return h.html.literal(u"""<p><strong>ERROR!</strong> The markdown supplied could not be parsed correctly.
Did you forget to surround a code snippet with "~~~~"?</p><pre>%s</pre>""" % escaped)

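# A minimal sketch of the oversize-text guard used in convert() above,
# assuming only the stdlib: html.escape stands in for the deprecated
# cgi.escape, and the 40000 default comes from the code above.
import html

def render_or_escape(source, converter, max_length=40000):
    if len(source) > max_length:
        # too big for markdown; return escaped plain text instead
        return u'<pre>%s</pre>' % html.escape(source)
    return converter(source)
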
def make_artifact(self, ticket_dict):
    remapped = {}
    for f, v in six.iteritems(ticket_dict):
        transform = self.FIELD_MAP.get(f, ())
        if transform is None:
            continue
        elif transform is True:
            remapped[f] = v
        elif callable(transform):
            transform(remapped, f, v)
        elif transform == ():  # "is ()" is unreliable across interpreters
            self.custom(remapped, f, v, ticket_dict.get('status'))
        else:
            new_f, conv = transform
            remapped[new_f] = conv(v)

    description = h.really_unicode(
        self.description_processing(remapped['description']))
    creator = owner = ''
    if ticket_dict.get('submitter') and not remapped.get('reported_by_id'):
        creator = '*Originally created by:* {0}\n'.format(
            h.really_unicode(ticket_dict['submitter']))
    if ticket_dict.get('assigned_to') and not remapped.get('assigned_to_id'):
        owner = '*Originally owned by:* {0}\n'.format(
            h.really_unicode(ticket_dict['assigned_to']))
    remapped['description'] = '{0}{1}{2}{3}'.format(
        creator, owner, '\n' if creator or owner else '', description)

    ticket_num = ticket_dict['id']
    existing_ticket = TM.Ticket.query.get(app_config_id=c.app.config._id,
                                          ticket_num=ticket_num)
    if existing_ticket:
        ticket_num = c.app.globals.next_ticket_num()
        self.warnings.append(
            'Ticket #%s: Ticket with this id already exists, using next available id: %s'
            % (ticket_dict['id'], ticket_num))
    else:
        if c.app.globals.last_ticket_num < ticket_num:
            c.app.globals.last_ticket_num = ticket_num
            ThreadLocalORMSession.flush_all()
    ticket = TM.Ticket(app_config_id=c.app.config._id,
                       custom_fields=dict(),
                       ticket_num=ticket_num,
                       import_id=ImportIdConverter.get().expand(
                           ticket_dict['id'], c.app))
    ticket.update(remapped)
    return ticket

def make_artifact(self, ticket_dict):
    remapped = {}
    for f, v in ticket_dict.iteritems():
        transform = self.FIELD_MAP.get(f, ())
        if transform is None:
            continue
        elif transform is True:
            remapped[f] = v
        elif callable(transform):
            transform(remapped, f, v)
        elif transform == ():  # "is ()" is unreliable across interpreters
            self.custom(remapped, f, v, ticket_dict.get('status'))
        else:
            new_f, conv = transform
            remapped[new_f] = conv(v)

    description = h.really_unicode(
        self.description_processing(remapped['description']))
    creator = owner = ''
    if ticket_dict.get('submitter') and not remapped.get('reported_by_id'):
        creator = u'*Originally created by:* {0}\n'.format(
            h.really_unicode(ticket_dict['submitter']))
    if ticket_dict.get('assigned_to') and not remapped.get('assigned_to_id'):
        owner = u'*Originally owned by:* {0}\n'.format(
            h.really_unicode(ticket_dict['assigned_to']))
    remapped['description'] = u'{0}{1}{2}{3}'.format(
        creator, owner, '\n' if creator or owner else '', description)

    ticket_num = ticket_dict['id']
    existing_ticket = TM.Ticket.query.get(app_config_id=c.app.config._id,
                                          ticket_num=ticket_num)
    if existing_ticket:
        ticket_num = c.app.globals.next_ticket_num()
        self.warnings.append(
            'Ticket #%s: Ticket with this id already exists, using next available id: %s'
            % (ticket_dict['id'], ticket_num))
    else:
        if c.app.globals.last_ticket_num < ticket_num:
            c.app.globals.last_ticket_num = ticket_num
            ThreadLocalORMSession.flush_all()
    ticket = TM.Ticket(
        app_config_id=c.app.config._id,
        custom_fields=dict(),
        ticket_num=ticket_num,
        import_id=ImportIdConverter.get().expand(ticket_dict['id'], c.app))
    ticket.update(remapped)
    return ticket

def save_attachment(cls, filename, fp, content_type=None, **kwargs):
    filename = h.really_unicode(filename)
    thumbnail_meta = dict(type="thumbnail", app_config_id=c.app.config._id)
    thumbnail_meta.update(kwargs)
    original_meta = dict(type="attachment", app_config_id=c.app.config._id)
    original_meta.update(kwargs)
    # Try to save as an image, with a thumbnail
    orig, thumbnail = cls.save_image(
        filename, fp,
        content_type=content_type,
        square=True,
        thumbnail_size=cls.thumbnail_size,
        thumbnail_meta=thumbnail_meta,
        save_original=True,
        original_meta=original_meta)
    if orig is not None:
        return orig, thumbnail
    else:
        # No, it's a generic attachment.
        # The stream may have been partially consumed in a failed
        # save_image attempt, so rewind it.
        fp.seek(0)
        return cls.from_stream(filename, fp, content_type=content_type,
                               **original_meta)

def check(self, text, artifact=None, user=None, content_type='comment', **kw):
    """Basic content spam check via Mollom. For more options
    see http://mollom.com/api#api-content
    """
    log_msg = text
    kw['postBody'] = text
    if artifact:
        try:
            # if it's a comment, get the wiki, ticket, etc. URL
            url = artifact.main_url()
        except Exception:
            url = artifact.url()
        # Should be able to send url, but can't right now due to a bug in
        # the PyMollom lib
        # kw['url'] = url
        log_msg = url
    user = user or c.user
    if user:
        kw['authorName'] = user.display_name or user.username
        kw['authorMail'] = user.email_addresses[0] if user.email_addresses else ''
    kw['authorIP'] = utils.ip_address(request)
    # kw will be urlencoded, need to utf8-encode
    for k, v in kw.items():
        kw[k] = h.really_unicode(v).encode('utf8')
    cc = self.service.checkContent(**kw)
    res = cc['spam'] == 2
    artifact.spam_check_id = cc.get('session_id', '')
    log.info("spam=%s (mollom): %s" % (str(res), log_msg))
    return res

def get_data(self, text, artifact=None, user=None, content_type='comment',
             request=None, **kw):
    kw['comment_content'] = text
    kw['comment_type'] = content_type
    if artifact:
        try:
            # if it's a comment, get the wiki, ticket, etc. URL
            url = artifact.main_url()
        except Exception:
            url = artifact.url()
        kw['permalink'] = url
    user = user or c.user
    if user:
        kw['comment_author'] = user.display_name or user.username
        kw['comment_author_email'] = user.email_addresses[0] if user.email_addresses else ''
    if request:
        kw['user_ip'] = utils.ip_address(request)
        kw['user_agent'] = request.headers.get('USER_AGENT')
        kw['referrer'] = request.headers.get('REFERER')
    # kw will be urlencoded, need to utf8-encode
    for k, v in kw.items():
        kw[k] = h.really_unicode(v).encode('utf8')
    return kw

def check(self, text, artifact=None, user=None, content_type='comment', **kw):
    """Basic content spam check via Mollom. For more options
    see http://mollom.com/api#api-content
    """
    log_msg = text
    kw['postBody'] = text
    if artifact:
        # Should be able to send url, but can't right now due to a bug in
        # the PyMollom lib
        # kw['url'] = artifact.url()
        log_msg = artifact.url()
    user = user or c.user
    if user:
        kw['authorName'] = user.display_name or user.username
        kw['authorMail'] = user.email_addresses[0] if user.email_addresses else ''
    user_ip = request.headers.get('X_FORWARDED_FOR', request.remote_addr)
    kw['authorIP'] = user_ip.split(',')[0].strip()
    # kw will be urlencoded, need to utf8-encode
    for k, v in kw.items():
        kw[k] = h.really_unicode(v).encode('utf8')
    cc = self.service.checkContent(**kw)
    res = cc['spam'] == 2
    artifact.spam_check_id = cc.get('session_id', '')
    log.info("spam=%s (mollom): %s" % (str(res), log_msg))
    return res

def ls_old(self):
    # Load last commit info
    id_re = re.compile("^{0}:{1}:".format(
        self.repo._id,
        re.escape(h.really_unicode(self.path()).encode('utf-8'))))
    lc_index = dict(
        (lc.name, lc.commit_info)
        for lc in LastCommitDoc_old.m.find(dict(_id=id_re)))

    # FIXME: Temporarily fall back to old, semi-broken lookup behavior
    # until refresh is done
    oids = [x.id for x in chain(self.tree_ids, self.blob_ids, self.other_ids)]
    id_re = re.compile("^{0}:".format(self.repo._id))
    lc_index.update(dict(
        (lc.object_id, lc.commit_info)
        for lc in LastCommitDoc_old.m.find(
            dict(_id=id_re, object_id={'$in': oids}))))
    # /FIXME

    if not lc_index:
        # allow fallback to the new method instead of showing a bunch of Nones
        return []

    results = []

    def _get_last_commit(name, oid):
        lc = lc_index.get(name, lc_index.get(oid, None))
        if lc is None:
            lc = dict(
                author=None,
                author_email=None,
                author_url=None,
                date=None,
                id=None,
                href=None,
                shortlink=None,
                summary=None)
        if 'href' not in lc:
            lc['href'] = self.repo.url_for_commit(lc['id'])
        return lc

    for x in sorted(self.tree_ids, key=lambda x: x.name):
        results.append(dict(
            kind='DIR',
            name=x.name,
            href=x.name + '/',
            last_commit=_get_last_commit(x.name, x.id)))
    for x in sorted(self.blob_ids, key=lambda x: x.name):
        results.append(dict(
            kind='FILE',
            name=x.name,
            href=x.name,
            last_commit=_get_last_commit(x.name, x.id)))
    for x in sorted(self.other_ids, key=lambda x: x.name):
        results.append(dict(
            kind=x.type,
            name=x.name,
            href=None,
            last_commit=_get_last_commit(x.name, x.id)))
    return results

def refresh_tree_info(self, tree, seen, lazy=True):
    from allura.model.repo import TreeDoc
    if lazy and tree.binsha in seen:
        return
    seen.add(tree.binsha)
    doc = TreeDoc(dict(
        _id=tree.hexsha,
        tree_ids=[],
        blob_ids=[],
        other_ids=[]))
    for o in tree:
        if o.type == 'submodule':
            continue
        obj = Object(
            name=h.really_unicode(o.name),
            id=o.hexsha)
        if o.type == 'tree':
            self.refresh_tree_info(o, seen, lazy)
            doc.tree_ids.append(obj)
        elif o.type == 'blob':
            doc.blob_ids.append(obj)
        else:
            obj.type = o.type
            doc.other_ids.append(obj)
    doc.m.save(safe=False)
    return doc

def __init__(self, title):
    self.title = h.really_unicode(unquote(title))
    self.page = WM.Page.query.get(app_config_id=c.app.config._id,
                                  title=self.title)
    if self.page is not None:
        self.attachment = WikiAttachmentsController(self.page)
    c.create_page_lightbox = W.create_page_lightbox

def highlight(self, text, lexer=None, filename=None):
    if not text:
        if lexer == 'diff':
            return h.html.literal('<em>File contents unchanged</em>')
        return h.html.literal('<em>Empty file</em>')
    # Don't use line numbers for diff highlights, as per [#1484]
    if lexer == 'diff':
        formatter = pygments.formatters.HtmlFormatter(
            cssclass='codehilite', linenos=False)
    else:
        formatter = self.pygments_formatter
    if lexer is None:
        try:
            lexer = pygments.lexers.get_lexer_for_filename(
                filename, encoding='chardet')
        except pygments.util.ClassNotFound:
            # no highlighting, but we should escape, encode, and wrap it in a <pre>
            text = h.really_unicode(text)
            text = cgi.escape(text)
            return h.html.literal(u'<pre>' + text + u'</pre>')
    else:
        lexer = pygments.lexers.get_lexer_by_name(lexer, encoding='chardet')
    return h.html.literal(pygments.highlight(text, lexer, formatter))

def convert(self, source, render_limit=True):
    if render_limit and len(source) > asint(config.get('markdown_render_max_length', 40000)):
        # if the text is too big, markdown can take a long time to process
        # it, so we return it as plain text
        log.info('Text is too big. Skipping markdown processing')
        escaped = cgi.escape(h.really_unicode(source))
        return h.html.literal(u'<pre>%s</pre>' % escaped)
    try:
        return markdown.Markdown.convert(self, source)
    except Exception:
        log.info('Invalid markdown: %s Upwards trace is %s', source,
                 ''.join(traceback.format_stack()), exc_info=True)
        escaped = h.really_unicode(source)
        escaped = cgi.escape(escaped)
        return h.html.literal(u"""<p><strong>ERROR!</strong> The markdown supplied could not be parsed correctly.
Did you forget to surround a code snippet with "~~~~"?</p><pre>%s</pre>""" % escaped)

def __repr__(self):
    # this can't change, is used in hex() above
    lines = ['t %s %s' % (t.hex(), name)
             for name, t in self.trees.iteritems()]
    lines += ['b %s %s' % (oid, name)
              for name, oid in self.blobs.iteritems()]
    return h.really_unicode('\n'.join(sorted(lines))).encode('utf-8')

def _to_python(self, value, state):
    value = h.really_unicode(value or '').encode('utf-8').lower()
    neighborhood = M.Neighborhood.query.get(
        name=state.full_dict['neighborhood'])
    message = plugin.ProjectRegistrationProvider.get().name_taken(
        value, neighborhood)
    if message:
        raise formencode.Invalid(message, value, state)
    return value

def highlight(self, text, lexer=None, filename=None):
    if not text:
        if lexer == 'diff':
            return Markup('<em>File contents unchanged</em>')
        return Markup('<em>Empty file</em>')
    # Don't use line numbers for diff highlights, as per [#1484]
    if lexer == 'diff':
        formatter = pygments.formatters.HtmlFormatter(cssclass='codehilite', linenos=False)
    else:
        formatter = self.pygments_formatter
    text = h.really_unicode(text)
    if lexer is None:
        if len(text) < asint(config.get('scm.view.max_syntax_highlight_bytes', 500000)):
            try:
                lexer = pygments.lexers.get_lexer_for_filename(filename, encoding='chardet')
            except pygments.util.ClassNotFound:
                pass
    else:
        lexer = pygments.lexers.get_lexer_by_name(lexer, encoding='chardet')

    if lexer is None or len(text) >= asint(config.get('scm.view.max_syntax_highlight_bytes', 500000)):
        # no highlighting, but we should escape, encode, and wrap it in a <pre>
        text = cgi.escape(text)
        return Markup('<pre>' + text + '</pre>')
    else:
        return Markup(pygments.highlight(text, lexer, formatter))

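# Standalone example of the pygments calls used by highlight() above;
# the pygments API is real, but this snippet is illustrative only.
import pygments
import pygments.formatters
import pygments.lexers

code = "def hello():\n    return 'hi'\n"
formatter = pygments.formatters.HtmlFormatter(cssclass='codehilite', linenos=False)
lexer = pygments.lexers.get_lexer_for_filename('example.py')
print(pygments.highlight(code, lexer, formatter))
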
def refresh_tree_info(self, tree, seen, lazy=True):
    from allura.model.repository import TreeDoc
    if lazy and tree.binsha in seen:
        return
    seen.add(tree.binsha)
    doc = TreeDoc(dict(
        _id=tree.hexsha,
        tree_ids=[],
        blob_ids=[],
        other_ids=[]))
    for o in tree:
        if o.type == 'submodule':
            continue
        obj = Object(
            name=h.really_unicode(o.name),
            id=o.hexsha)
        if o.type == 'tree':
            self.refresh_tree_info(o, seen, lazy)
            doc.tree_ids.append(obj)
        elif o.type == 'blob':
            doc.blob_ids.append(obj)
        else:
            obj.type = o.type
            doc.other_ids.append(obj)
    doc.m.save(safe=False)
    return doc

def merge(self, mr):
    g = self._impl._git.git
    # can't merge in a bare repo, so we need to clone
    tmp_path = tempfile.mkdtemp()
    try:
        tmp_repo = git.Repo.clone_from(
            self.full_fs_path,
            to_path=tmp_path,
            shared=True,
            bare=False)
        tmp_repo = GitImplementation(Object(full_fs_path=tmp_path))._git
        tmp_repo.git.fetch('origin', mr.target_branch)
        tmp_repo.git.checkout(mr.target_branch)
        tmp_repo.git.fetch(mr.downstream_repo.full_fs_path, mr.source_branch)
        author = h.really_unicode(c.user.display_name or c.user.username)
        tmp_repo.git.config('user.name', author.encode('utf8'))
        # a public email alias could be nice here
        tmp_repo.git.config('user.email', 'allura@localhost')
        msg = u'Merge {} branch {} into {}\n\n{}'.format(
            mr.downstream_repo.url(),
            mr.source_branch,
            mr.target_branch,
            h.absurl(mr.url()))
        tmp_repo.git.merge(mr.downstream.commit_id, '-m', msg)
        tmp_repo.git.push('origin', mr.target_branch)
    finally:
        shutil.rmtree(tmp_path, ignore_errors=True)

def check_custom_field(self, field, value, ticket_status):
    field = c.app.globals.get_custom_field(field)
    if (field['type'] == 'select') and value:
        field_options = h.split_select_field_options(
            h.really_unicode(field['options']))
        if value not in field_options:
            field['options'] = ' '.join([field['options'], value])
    elif (field['type'] == 'milestone') and value:
        milestones = field['milestones']
        for milestone in milestones:
            if milestone['name'] == value:
                if ticket_status in c.app.globals.open_status_names:
                    milestone['complete'] = False
                break
        else:
            milestone = {
                'due_date': '',
                'complete': ticket_status not in c.app.globals.open_status_names,
                'description': '',
                'name': value,
                'old_name': value,
            }
            field['milestones'].append(milestone)
    ThreadLocalORMSession.flush_all()

def save(self, **kw):
    require_access(c.app, 'write')
    rate_limit()
    post = BM.BlogPost.new(**kw)
    g.spam_checker.check(kw['title'] + u'\n' + kw['text'], artifact=post,
                         user=c.user, content_type='blog-post')
    redirect(h.really_unicode(post.url()).encode('utf-8'))

def update(self, title=None, text=None, labels=None,
           viewable_by=None, new_viewable_by=None, subscribe=False, **kw):
    activity_verb = 'created'
    if not title:
        flash('You must provide a title for the page.', 'error')
        redirect('edit')
    title = title.replace('/', '-')
    self.rate_limit(WM.Page, 'Page create/edit')
    if not self.page:
        # the page doesn't exist yet, so create it
        self.page = WM.Page.upsert(self.title)
        self.page.viewable_by = ['all']
    else:
        require_access(self.page, 'edit')
        activity_verb = 'modified'
    name_conflict = None
    if self.page.title != title:
        name_conflict = WM.Page.query.find(
            dict(app_config_id=c.app.config._id, title=title, deleted=False)).first()
        if name_conflict:
            flash('There is already a page named "%s".' % title, 'error')
        else:
            if self.page.title == c.app.root_page_name:
                WM.Globals.query.get(app_config_id=c.app.config._id).root = title
            self.page.title = title
            activity_verb = 'renamed'
    self.page.text = text
    if labels:
        self.page.labels = labels.split(',')
    else:
        self.page.labels = []
    self.page.commit(subscribe=subscribe)
    g.spam_checker.check(title + u'\n' + text, artifact=self.page,
                         user=c.user, content_type='wiki')
    g.director.create_activity(c.user, activity_verb, self.page,
                               related_nodes=[c.project], tags=['wiki'])
    if new_viewable_by:
        if new_viewable_by == 'all':
            self.page.viewable_by.append('all')
        else:
            user = c.project.user_in_project(str(new_viewable_by))
            if user:
                self.page.viewable_by.append(user.username)
    if viewable_by:
        for u in viewable_by:
            if u.get('delete'):
                if u['id'] == 'all':
                    self.page.viewable_by.remove('all')
                else:
                    user = M.User.by_username(str(u['id']))
                    if user:
                        self.page.viewable_by.remove(user.username)
    redirect('../' + h.really_unicode(self.page.title).encode('utf-8')
             + ('/' if not name_conflict else '/edit'))

def set_options(self, show_discussion=False, allow_email_posting=False):
    self.app.config.options['show_discussion'] = bool(show_discussion)
    self.app.config.options['AllowEmailPosting'] = bool(allow_email_posting)
    flash('Blog options updated')
    redirect(h.really_unicode(c.project.url() + 'admin/tools').encode('utf-8'))

def readme(self):
    'returns (filename, unicode text) if a readme file is found'
    for x in self.blob_ids:
        if README_RE.match(x.name):
            name = x.name
            blob = self[name]
            return (x.name, h.really_unicode(blob.text))
    return None, None

def sidebar_menu(self):
    try:
        page = request.path_info.split(self.url)[-1].split('/')[-2]
        page = h.really_unicode(page)
        page = WM.Page.query.find(dict(
            app_config_id=self.config._id, title=page, deleted=False)).first()
    except Exception:
        page = None
    return self.create_common_wiki_menu(
        has_access(self, 'create'), c.app.url, 'add_wiki_page')