def delete(self, version=None, db=None):
    """Delete the whole page, or a single version of it.

    :param version: if None, every version of the page is removed;
                    otherwise only that specific version is deleted.
    :param db: optional database handle so the deletion can join an
               outer transaction.

    When the page no longer exists afterwards, the page name cache is
    invalidated and the page's attachments are removed.  Change
    listeners are then notified: `wiki_page_deleted` for a full
    deletion, `wiki_page_version_deleted` (if implemented by the
    listener) for a single-version deletion.
    """
    # NOTE(review): assert is stripped under ``python -O``; a TracError
    # would be a more robust guard here.
    assert self.exists, 'Cannot delete non-existent page'

    @self.env.with_transaction(db)
    def do_delete(db):
        cursor = db.cursor()
        if version is None:
            # Delete a wiki page completely
            cursor.execute("DELETE FROM wiki WHERE name=%s",
                           (self.name, ))
            self.env.log.info('Deleted page %s' % self.name)
        else:
            # Delete only a specific page version
            cursor.execute("DELETE FROM wiki WHERE name=%s and version=%s",
                           (self.name, version))
            self.env.log.info('Deleted version %d of page %s'
                              % (version, self.name))
        if version is None or version == self.version:
            # Re-fetch so self.exists / self.version reflect the deletion
            self._fetch(self.name, None, db)
        if not self.exists:
            # Invalidate page name cache
            del WikiSystem(self.env).pages
            # Delete orphaned attachments
            from trac.attachment import Attachment
            Attachment.delete_all(self.env, 'wiki', self.name, db)

    # Let change listeners know about the deletion
    if not self.exists:
        for listener in WikiSystem(self.env).change_listeners:
            listener.wiki_page_deleted(self)
    else:
        for listener in WikiSystem(self.env).change_listeners:
            if hasattr(listener, 'wiki_page_version_deleted'):
                listener.wiki_page_version_deleted(self)
def _get_page_operations(self, source_env, local_env):
    """Map each source wiki page name to the sync operation it needs.

    Pages present only in the source wiki get 'create'; pages present
    in both wikis get 'update'; pages whose local and source text are
    already identical are dropped from the result.

    :return: dict of page name -> 'create' | 'update'
    """
    operations = {}
    # Open source and destination wikis
    source_wiki_system = WikiSystem(source_env)
    dest_wiki_system = WikiSystem(local_env)
    # Extract wiki pages from both wikis; sets give O(1) membership tests
    # instead of the previous O(n) list scans
    local_pages = set(dest_wiki_system.get_pages())
    source_pages = set()
    for page in source_wiki_system.get_pages():
        source_pages.add(page)
        operations[page] = 'create'
    # Pages existing on both sides are updates, not creations
    for page in local_pages:
        if page in source_pages:
            operations[page] = 'update'
    # Do not update pages with identical contents.  Iterate over a
    # snapshot: deleting from a dict while iterating its items() raises
    # RuntimeError on Python 3.
    for page, operation in list(operations.items()):
        local_page = WikiPage(self.env, page)
        source_page = WikiPage(source_env, page)
        if local_page.text == source_page.text:
            del operations[page]
    return operations
def rename(self, new_name):
    """Rename this wiki page in-place, preserving its full history.

    References to the old name elsewhere in the wiki are not updated,
    so renaming can leave dangling links behind.

    :raises TracError: if the page does not exist, the new name is
                       invalid, or a page with the new name already
                       exists.
    """
    if not self.exists:
        raise TracError(_("Cannot rename non-existent page"))
    if not validate_page_name(new_name):
        raise TracError(
            _("Invalid Wiki page name '%(name)s'", name=new_name))
    previous_name = self.name

    with self.env.db_transaction as db:
        if WikiPage(self.env, new_name).exists:
            raise TracError(
                _("Can't rename to existing %(name)s page.",
                  name=new_name))

        db("UPDATE wiki SET name=%s WHERE name=%s",
           (new_name, previous_name))
        # The cached set of page names is stale now
        del WikiSystem(self.env).pages
        # Move the page's attachments along with it
        from trac.attachment import Attachment
        Attachment.reparent_all(self.env, self.realm, previous_name,
                                self.realm, new_name)

    self.name = new_name
    self.env.log.info("Renamed page %s to %s", previous_name, new_name)

    for listener in WikiSystem(self.env).change_listeners:
        if hasattr(listener, 'wiki_page_renamed'):
            listener.wiki_page_renamed(self, previous_name)
def filter_stream(self, req, method, filename, stream, data):
    """Replace the wiki page-path breadcrumb with a hierarchical menu.

    For wiki requests, empties ``div#pagepath`` and rebuilds it from the
    page's resource ancestors, attaching a superfish drop-down listing
    each wiki ancestor's children.  Other requests pass through
    unchanged.

    Fixes: removed the unused ``remove_tran`` XPath string and renamed
    the local that shadowed the ``filter`` builtin.
    """
    if req.path_info.startswith('/wiki/'):
        if data and data.has_key('page') and hasattr(
                data['page'], 'resource'):
            title = data['title']
            xform = Transformer('//div[@id="pagepath"]')
            xform = xform.empty()
            xform = xform.append(
                tag.a('wiki:', href=req.href.wiki(),
                      class_='pathentry first'))
            resource = data['page'].resource
            relation_system = ResourceRelationSystem(self.env)
            tree = relation_system.get_cached_tree(req)
            add_stylesheet(req, 'wikimenu/css/superfish.css')
            add_script(req, 'wikimenu/js/jquery.bgiframe.min.js')
            add_script(req, 'wikimenu/js/superfish.js')
            add_script(req, 'wikimenu/js/popup.js')
            # Walk from the farthest ancestor down to the page itself
            resources = list(relation_system.get_ancestors(resource,
                                                           tree=tree))
            for res in reversed(resources):
                label = get_resource_description(self.env, res)
                if res.realm == 'wiki':
                    if res.id and WikiSystem(self.env).has_page(res.id):
                        # Drop-down with the wiki children of this node
                        menu = tag.ul()
                        for res_child in relation_system.get_children(res):
                            child_label = get_resource_description(
                                self.env, res_child)
                            if res_child.realm == 'wiki':
                                if res_child.id and WikiSystem(
                                        self.env).has_page(res_child.id):
                                    anc = tag.a(
                                        child_label,
                                        href=req.href.wiki(child_label))
                                    menu.append(tag.li(anc))
                        xform = xform.append(
                            tag.ul(tag.li(
                                tag.a(label,
                                      href=req.href.wiki(label)),
                                menu), class_='wiki_menu'))
                        if title != label:
                            xform = xform.append(
                                tag.span(' / ',
                                         class_='pathentry sep'))
            return stream | xform
    return stream
def save(self, author, comment, remote_addr=None, t=None):
    """Save a new version of a page.

    :param author: the author of this version.
    :param comment: the change comment.
    :param remote_addr: stored in the ``ipnr`` column.
    :param t: timestamp of the change; defaults to now (UTC).
    :raises TracError: if the page name is invalid or nothing changed.

    :since 1.0.3: `remote_addr` is optional and deprecated, and will be
                  removed in 1.3.1
    """
    if not validate_page_name(self.name):
        raise TracError(_("Invalid Wiki page name '%(name)s'",
                          name=self.name))
    new_text = self.text != self.old_text
    if not new_text and self.readonly == self.old_readonly:
        raise TracError(_("Page not modified"))
    t = t or datetime_now(utc)

    with self.env.db_transaction as db:
        if new_text:
            db("""INSERT INTO wiki (name, version, time, author, ipnr,
                                    text, comment, readonly)
                  VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
                  """, (self.name, self.version + 1, to_utimestamp(t),
                        author, remote_addr, self.text, comment,
                        self.readonly))
            self.version += 1
        else:
            # Only the readonly flag changed: no new version is created
            db("UPDATE wiki SET readonly=%s WHERE name=%s",
               (self.readonly, self.name))
        if self.version == 1:
            # Invalidate page name cache
            del WikiSystem(self.env).pages

    self.author = author
    self.comment = comment
    self.time = t

    for listener in WikiSystem(self.env).change_listeners:
        if self.version == 1:
            listener.wiki_page_added(self)
        else:
            # Old-style listeners take remote_addr as well; dispatch on
            # the handler's arity to stay compatible with both
            from trac.util import arity
            if arity(listener.wiki_page_changed) == 6:
                listener.wiki_page_changed(self, self.version, t,
                                           comment, author, remote_addr)
            else:
                listener.wiki_page_changed(self, self.version, t,
                                           comment, author)

    self.old_readonly = self.readonly
    self.old_text = self.text
def do_import(db): cursor = db.cursor() # Make sure we don't insert the exact same page twice cursor.execute("SELECT text FROM wiki WHERE name=%s " "ORDER BY version DESC LIMIT 1", (title,)) old = list(cursor) if old and title in create_only: printout(_(' %(title)s already exists', title=title)) result[0] = False return if old and data == old[0][0]: printout(_(' %(title)s is already up to date', title=title)) result[0] = False return if replace and old: cursor.execute("UPDATE wiki SET text=%s WHERE name=%s " " AND version=(SELECT max(version) FROM wiki " " WHERE name=%s)", (data, title, title)) else: cursor.execute("INSERT INTO wiki(version,name,time,author," " ipnr,text) " "SELECT 1+COALESCE(max(version),0),%s,%s," " 'trac','127.0.0.1',%s FROM wiki " "WHERE name=%s", (title, to_utimestamp(datetime.now(utc)), data, title)) if not old: del WikiSystem(self.env).pages
def load_conf(self):
    """Load ODT style mappings and the template path from the
    'PageToOdtStyles' wiki page.

    Populates ``self.styles`` (keyed by the name after the ``style_``
    prefix) and ``self.template_filename`` (attachment path of the
    configured template).

    :raises Exception: when the PageToOdtStyles wiki page is missing.
    """
    self.styles = {}
    self.template_filename = None
    wiki_system = WikiSystem(self.env)
    if not wiki_system.has_page('PageToOdtStyles'):
        # py3-compatible raise syntax (was ``raise Exception, '...'``)
        raise Exception('Please create a PageToOdtStyles wiki page.')
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    cursor.execute(
        "SELECT text FROM wiki WHERE name = 'PageToOdtStyles' ORDER BY version DESC LIMIT 1"
    )
    page_content = ''  # guard: previously unbound if the query was empty
    for (text, ) in cursor:
        page_content = text
        break
    for line in page_content.strip().splitlines():
        if line.find('=') != -1:
            # maxsplit=1: values may themselves contain '='; the
            # previous maxsplit=2 raised ValueError on such lines
            name, value = [token.strip() for token in line.split("=", 1)]
            if name.startswith('style_'):
                self.styles[name[6:]] = value
            elif name == 'template':
                self.template_filename = os.path.join(
                    self.env.path, 'attachments', 'wiki',
                    'PageToOdtStyles', value)
def _render_book(self, req, cat, page, component):
    """Admin panel for composing a LaTeX wiki book.

    GET renders the page-selection form; a POST with 'create' builds
    the book from the selected pages via `process_wikiprintlatex`.
    """
    req.perm.assert_permission('WIKIPRINTLATEX_BOOK')

    all_pages = list(WikiSystem(self.env).get_pages())
    selected = [name for name
                in req.session.get('wikiprint_rightpages', '').split(',')
                if name]

    if req.method == 'POST' and req.args.get('create'):
        selected = req.args.get('rightpages_all')
        title = req.args.get('title') or self.env.project_name
        subject = req.args.get('subject')
        date = req.args.get('date')
        version = req.args.get('version')
        # Remember the selection for the next visit
        req.session['wikiprint_rightpages'] = selected
        selected = selected.split(',')
        pdfbookname = title.replace(' ', '_').replace(':', '_') \
                           .replace(',', '_')
        return self.process_wikiprintlatex(req, title, subject, selected,
                                           version, date, pdfbookname)

    data = {}
    data['allpages'] = all_pages
    data['leftpages'] = sorted(name for name in all_pages
                               if name not in selected)
    data['rightpages'] = selected
    add_script(req, 'wikiprint/js/admin_wikiprintlatex.js')
    return 'admin_makebook.html', data
def _set_title(self, req, page, action):
    """Store the formatted page name and window title in the HDF.

    Returns the title, which is the formatted page name followed by
    the action in parentheses when an action is given.
    """
    name = WikiSystem(self.env).format_page_name(page.name)
    if action:
        title = '%s (%s)' % (name, action)
    else:
        title = name
    req.hdf['wiki.page_name'] = name
    req.hdf['title'] = title
    return title
def _get_translations(self, prefix, base_page_name):
    """Return the sorted language codes for which a translated version
    of the given base page exists in the wiki."""
    available = []
    for code in sorted(self.languages):
        candidate = self._get_translated_page(prefix, base_page_name,
                                              code)
        if WikiSystem(self.env).has_page(candidate):
            available.append(code)
    return available
def save(self, req):
    """Persist submitted preference values into the user session.

    Runs only for a 'save' action.  'wiki.href' must name an existing
    wiki page and 'tickets.href' must be 'report', 'query' or the id of
    an existing report; invalid values produce a warning instead of
    being stored.  An empty value clears the setting.  All other keys
    are stored as-is.
    """
    if req.args and req.args.has_key('action') \
            and req.args['action'] == 'save':
        for key in SESSION_KEYS.values():
            if req.args.has_key(key):
                if key == 'wiki.href':
                    wiki_href = req.args[key]
                    if wiki_href == '':
                        # Empty value: clear the session entry
                        req.session[key] = ''
                        continue
                    validated = WikiSystem(self.env).has_page(wiki_href)
                    if validated:
                        req.session[key] = req.args[key]
                    else:
                        add_warning(req, Markup(tag.span(Markup(_(
                            "%(page)s is not a valid Wiki page",
                            page=tag.b(wiki_href)
                        )))))
                elif key == 'tickets.href':
                    ticket_href = req.args[key]
                    if ticket_href == '':
                        # Empty value: clear the session entry
                        req.session[key] = ''
                        continue
                    reports = self.get_report_list()
                    self.log.info('reports: %s' % reports)
                    # Accept the generic 'report'/'query' views or the
                    # numeric id of an existing report
                    if ticket_href in ('report', 'query') \
                            or as_int(ticket_href, 0) in reports:
                        req.session[key] = req.args[key]
                    else:
                        add_warning(req, Markup(tag.span(Markup(_(
                            "%(report)s is not a valid report",
                            report=tag.b(ticket_href)
                        )))))
                else:
                    # All other keys are stored without validation
                    req.session[key] = req.args[key]
def render_macro(self, req, name, args):
    """Render a bullet list of wiki pages below a prefix.

    Macro arguments, comma separated: ``prefix,level``.  ``prefix`` is
    the page-name prefix to search for ('None' keeps the current page
    as prefix); ``level`` limits how deep in the hierarchy to descend
    (0 = unlimited).
    """
    prefix = req.hdf.getValue('wiki.page_name', '') + '/'
    level = 0
    if args:
        args = args.replace('\'', '\'\'')
        args = args.split(',')
        if args[0] != 'None':
            prefix = args[0]
        if len(args) > 1 and args[1] != 'None':
            level = int(args[1])

    listed = []
    for page_name in WikiSystem(self.env).get_pages(prefix):
        # Skip pages nested deeper than the requested level
        if level and len(page_name.split('/')) > level + 1:
            continue
        found = self.TITLE_RE.search(WikiPage(self.env, page_name).text)
        title = ''
        if found:
            title = found.group(1)
        listed.append((page_name, title))

    return html.UL([html.LI(html.A(p, title=t, href=req.href.wiki(p)),
                            ' ', t)
                    for p, t in listed])
def trac_get_reference(env, context, rawtext, target, text):
    """Build a docutils `reference` node for a TracLinks target.

    Resolves *target* (optionally followed by *text*) through the wiki
    syntax providers via `extract_link`; when that yields no element,
    falls back to a plain wiki link for *target*.  Returns the
    reference node, or None when no URI was found and the page exists.
    """
    fulltext = target + ' ' + text if text else target
    link = extract_link(env, context, fulltext)
    uri = None
    missing = False
    if isinstance(link, (Element, Fragment)):
        linktext = Markup(link).striptags()
        # the following is a bit hackish, but it takes into account:
        # - an eventual trailing '?' for missing wiki pages
        # - space eventually introduced due to split_page_names option
        if linktext.rstrip('?').replace(' ', '') != target:
            text = linktext
        elt = find_element(link, 'href', 'missing')
        if elt is not None:
            uri = elt.attrib.get('href', '')
            missing = 'missing' in elt.attrib.get('class', '').split()
    else:
        # No resolver matched: treat the target as a wiki page name
        uri = context.href.wiki(target)
        missing = not WikiSystem(env).has_page(target)
    if uri or missing:
        reference = nodes.reference(rawtext, text or target)
        reference['refuri'] = uri
        if missing:
            reference['classes'].append('missing')
        return reference
def rename(self, new_name):
    """Rename wiki page in-place, keeping the history intact.

    Renaming a page this way will eventually leave dangling references
    to the old page - which literally doesn't exist anymore.

    :raises TracError: if a page with *new_name* already exists.
    """
    # NOTE(review): assert is stripped under ``python -O``; a TracError
    # would be a more robust guard here.
    assert self.exists, 'Cannot rename non-existent page'
    old_name = self.name

    @self.env.with_transaction()
    def do_rename(db):
        cursor = db.cursor()
        # Refuse to clobber an existing page
        new_page = WikiPage(self.env, new_name, db=db)
        if new_page.exists:
            raise TracError(
                _("Can't rename to existing %(name)s page.",
                  name=new_name))
        cursor.execute("UPDATE wiki SET name=%s WHERE name=%s",
                       (new_name, old_name))
        # Invalidate page name cache
        del WikiSystem(self.env).pages
        # Reparent attachments
        from trac.attachment import Attachment
        Attachment.reparent_all(self.env, 'wiki', old_name,
                                'wiki', new_name)

    self.name = new_name
    self.env.log.info('Renamed page %s to %s', old_name, new_name)

    for listener in WikiSystem(self.env).change_listeners:
        if hasattr(listener, 'wiki_page_renamed'):
            listener.wiki_page_renamed(self, old_name)
def __loadTemplatePage(self, pagina):
    """Return the template file path for the wiki page *pagina*.

    Reads the template configuration page (via ``self.CONF_QUERY``),
    which defines two parallel comma-separated lists:
    ``template_list`` (page-name patterns) and ``template_file``
    (template attachment names).  Returns the attachment path of the
    first pattern matching *pagina*, or None when none matches.
    (Docstring translated from Spanish.)

    Fixes: ``split("=", 2)`` -> maxsplit=1 (the old value raised
    ValueError for values containing '='); dropped the unused
    ``wiki_system`` local; replaced the manual index counter with
    `enumerate`.
    """
    page_content = ''
    template_list = []
    template_file = []
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    cursor.execute(self.CONF_QUERY)
    for (text, ) in cursor:
        page_content = text
        self.env.log.debug('Accedo a la pagina de plantillas %s',
                           page_content)
        break
    for line in page_content.strip().splitlines():
        if line.find('=') != -1:
            name, value = [token.strip() for token in line.split("=", 1)]
            self.env.log.debug('-------> %s %s', name, value)
            if name == 'template_list':
                template_list = value.split(', ')
            if name == 'template_file':
                template_file = value.split(', ')
    # assumes template_list and template_file have equal length —
    # TODO confirm against the configuration page format
    for i, element in enumerate(template_list):
        self.env.log.debug('->>>>>>> %s', pagina)
        if re.match('.*' + element + '.*', pagina):
            return os.path.join(self.env.path, 'attachments', 'wiki',
                                'WikiTemplateConf', template_file[i])
    return None
def expand_macro(self, formatter, name, args):
    """Render the guide's table of contents as a ``wiki-toc`` div.

    Entries come from ``self.TOC``; each target is prefixed with the
    current page's scope so translated or versioned copies of the guide
    (e.g. TranslateRu/Guide, 0.X/Guide) link within their own tree.
    """
    curpage = formatter.resource.id
    guideprefix = GUIDE_NAME + '/'
    data = {
        'guide': GUIDE_NAME,
    }
    # scoped TOC (e.g. TranslateRu/Guide or 0.X/Guide ...)
    prefix = ''
    slash = curpage.find('/')
    if slash > 0:
        prefix = curpage[:slash + 1]
        if prefix.endswith(guideprefix):
            prefix = prefix[:-len(guideprefix)]
    ws = WikiSystem(self.env)

    items = []
    for ref, title in self.TOC:
        page = prefix + ref % data
        link = tag.a(title, href=formatter.href.wiki(page),
                     class_=(not ws.has_page(page) and 'missing'))
        items.append(tag.li(link,
                            class_=(page == curpage and 'active')))

    return tag.div(tag.h4(_('Table of Contents')),
                   tag.ul(items),
                   class_='wiki-toc')
def save(self, author, comment, t=None, replace=False):
    """Save a new version of a page.

    :param author: the author of this version.
    :param comment: the change comment.
    :param t: timestamp of the change; defaults to now (UTC).
    :param replace: if True and the page already has a version,
                    overwrite the current version's text in place
                    instead of creating a new version.
    :raises TracError: if the page name is invalid or nothing changed.
    """
    if not validate_page_name(self.name):
        raise TracError(_("Invalid Wiki page name '%(name)s'",
                          name=self.name))
    new_text = self.text != self.old_text
    if not new_text and self.readonly == self.old_readonly:
        raise TracError(_("Page not modified"))
    t = t or datetime_now(utc)

    with self.env.db_transaction as db:
        if new_text:
            if replace and self.version != 0:
                # In-place replacement of the current version's text
                db("""
                    UPDATE wiki SET text=%s WHERE name=%s AND version=%s
                    """, (self.text, self.name, self.version))
            else:
                self.version += 1
                db("""INSERT INTO wiki (name,version,time,author,text,
                                        comment,readonly)
                      VALUES (%s,%s,%s,%s,%s,%s,%s)
                      """, (self.name, self.version, to_utimestamp(t),
                            author, self.text, comment, self.readonly))
        else:
            # Only the readonly flag changed: no new version is created
            db("UPDATE wiki SET readonly=%s WHERE name=%s",
               (self.readonly, self.name))
        if self.version == 1:
            # Invalidate page name cache
            del WikiSystem(self.env).pages

    self.author = author
    self.comment = comment
    self.time = t

    for listener in WikiSystem(self.env).change_listeners:
        # component_guard keeps one faulty listener from aborting save
        with self.env.component_guard(listener):
            if self.version == 1:
                listener.wiki_page_added(self)
            else:
                listener.wiki_page_changed(self, self.version, t,
                                           comment, author)

    self.old_readonly = self.readonly
    self.old_text = self.text
def render(self, context, mimetype, content, filename=None, rev=None):
    """Render reStructuredText *content* to an HTML fragment.

    Uses a specialized docutils HTML translator that reports errors
    unobtrusively and applies the wiki's safe-scheme / safe-origin
    policy to references and images.
    """
    # Minimize visual impact of errors
    class TracHTMLTranslator(html4css1.HTMLTranslator):
        """Specialized translator with unobtrusive error reporting
        and some extra security features"""

        def __init__(self, *args, **kwargs):
            # Snapshot wiki safety settings from the enclosing scope
            self._render_unsafe_content = wikisys.render_unsafe_content
            self._safe_schemes = set(wikisys.safe_schemes)
            html4css1.HTMLTranslator.__init__(self, *args, **kwargs)

        def visit_system_message(self, node):
            # Collapse the full error paragraph into a small '?' marker
            paragraph = node.children.pop(0)
            message = escape(paragraph.astext()) if paragraph else ''
            backrefs = node['backrefs']
            if backrefs:
                span = ('<span class="system-message">%s</span>' %
                        (''.join('<a href="#%s" title="%s">?</a>' %
                                 (backref, message)
                                 for backref in backrefs)))
            else:
                span = ('<span class="system-message" title="%s">?</span>'
                        % message)
            self.body.append(span)

        def depart_system_message(self, node):
            pass

        def visit_image(self, node):
            html4css1.HTMLTranslator.visit_image(self, node)
            # Cross-origin images get an explicit crossorigin attribute
            uri = node.attributes.get('uri')
            if not wikisys.is_safe_origin(uri, context.req):
                self.body[-1] = self.body[-1].replace(
                    '<img ', '<img crossorigin="anonymous" ')

        def visit_reference(self, node):
            # Drop links whose scheme is not in the safe list
            if self._is_safe_uri(node.get('refuri')):
                html4css1.HTMLTranslator.visit_reference(self, node)

        def depart_reference(self, node):
            if self._is_safe_uri(node.get('refuri')):
                html4css1.HTMLTranslator.depart_reference(self, node)

        def _is_safe_uri(self, uri):
            if self._render_unsafe_content or not uri:
                return True
            else:
                pos = uri.find(':')
                return pos < 0 or uri[0:pos] in self._safe_schemes

    wikisys = WikiSystem(self.env)
    writer = html4css1.Writer()
    writer.translator_class = TracHTMLTranslator
    inliner = rst.states.Inliner()
    # Make the Trac environment available to the Trac role/directive
    inliner.trac = (self.env, context)
    parser = rst.Parser(inliner=inliner)
    content = content_to_unicode(self.env, content, mimetype)
    # The default Reader is explicitly passed as a workaround for #11248
    parts = publish_parts(content, writer=writer, parser=parser,
                          reader=standalone.Reader(parser),
                          settings_overrides={'halt_level': 6,
                                              'file_insertion_enabled': 0,
                                              'raw_enabled': 0,
                                              'warning_stream': False})
    return parts['html_body']
def _get_problems(self, silent):
    """Collect translation problems across all wiki pages and return
    them as a wiki-formatted report.

    Scans every page containing the TranslatedPages macro for bad
    arguments and unknown language codes, then checks each base page
    for a missing base-language version and for translations whose
    'revision' argument exceeds the base page's version.

    :param silent: if True and no problems were found, return a blank
                   placeholder instead of the 'none' report.

    Fixes: the bare ``except: pass`` (which hid real errors) now
    catches only the expected failures of the revision lookup, and the
    ``!= None``/``== None`` comparisons use identity tests.
    """
    res = u""
    resargs = u""
    respages = u""
    base_pages = []
    for page in sorted(WikiSystem(self.env).get_pages()):
        for line in WikiPage(self.env,
                             page).text.replace('\r', '').split(u'\n'):
            regres = self.macro_re.search(line)
            if regres is not None:
                (prefix, base_page_name,
                 lang_code) = self._get_page_info(page)
                basename = self._get_translated_page(prefix,
                                                     base_page_name,
                                                     self.base_lang)
                if basename not in base_pages:
                    base_pages.append(basename)
                resargs += self._check_args(page, regres.group(1),
                                            lang_code)
                if self.languages.get(lang_code, None) is None:
                    respages += "||[[wiki:/%s]]||Translated page language code unknown||\n" % page
    base_pages.sort()
    for base_page in base_pages:
        (prefix, page, lang_code) = self._get_page_info(base_page)
        translations = self._get_translations(prefix, page)
        basever = 0
        if self.base_lang not in translations:
            respages += "||[[wiki:/%s]]||Base language is missing for translated pages||\n" % base_page
        else:
            basever = WikiPage(self.env, base_page).version
        for translation in translations:
            transpage = self._get_translated_page(prefix, page,
                                                  translation)
            regres = self.macro_re.search(
                WikiPage(self.env, transpage).text)
            if regres is not None:
                argstr = regres.group(1)
                if argstr is not None and len(argstr) > 0:
                    args, kw = parse_args(argstr)
                    try:
                        rev = int(kw[u'revision'])
                        if rev != 0 and rev > basever:
                            respages += "||[[wiki:/%s]]||Revision %s is higher than base revision %s||\n" \
                                        % (transpage, rev, basever)
                    except (KeyError, ValueError, TypeError):
                        # no / non-numeric 'revision' argument: nothing
                        # to compare (was a bare ``except``)
                        pass
            else:
                respages += "||[[wiki:/%s]]||Translated page misses macro 'TranslatedPages'||\n" % transpage
    if len(resargs):
        res += u"=== Errors in supplied arguments ===\n||= Page =||= Arguments =||= Issue =||\n" + resargs
    if len(respages):
        res += u"=== Errors in page structure ===\n||= Page =||= Issue =||\n" + respages
    if not len(res):
        if (silent):
            return u" "
        res = u'none\n'
    return u"== Problem pages ==\n" + res
def save(self, author, comment, remote_addr, t=None, db=None):
    """Save a new version of a page.

    :param author: the author of this version.
    :param comment: the change comment.
    :param remote_addr: stored in the ``ipnr`` column.
    :param t: timestamp of the change; defaults to now (UTC).
    :raises TracError: if the page name is invalid or nothing changed.

    :since 0.13: the `db` parameter is no longer needed and will be
                 removed in version 0.14
    """
    if not validate_page_name(self.name):
        raise TracError(_("Invalid Wiki page name '%(name)s'",
                          name=self.name))
    new_text = self.text != self.old_text
    if not new_text and self.readonly == self.old_readonly:
        raise TracError(_("Page not modified"))
    t = t or datetime.now(utc)

    with self.env.db_transaction as db:
        if new_text:
            db("""INSERT INTO wiki (name, version, time, author, ipnr,
                                    text, comment, readonly)
                  VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
                  """, (self.name, self.version + 1, to_utimestamp(t),
                        author, remote_addr, self.text, comment,
                        self.readonly))
            self.version += 1
            # Point the resource at the newly created version
            self.resource = self.resource(version=self.version)
        else:
            # Only the readonly flag changed: no new version is created
            db("UPDATE wiki SET readonly=%s WHERE name=%s",
               (self.readonly, self.name))
        if self.version == 1:
            # Invalidate page name cache
            del WikiSystem(self.env).pages

    self.author = author
    self.comment = comment
    self.time = t

    for listener in WikiSystem(self.env).change_listeners:
        if self.version == 1:
            listener.wiki_page_added(self)
        else:
            listener.wiki_page_changed(self, self.version, t, comment,
                                       author, remote_addr)

    self.old_readonly = self.readonly
    self.old_text = self.text
def _reindex_wiki(self, realm, feedback, finish_fb):
    """Re-index every wiki page, skipping pages that are not newer than
    their stored index timestamp."""
    def needs_update(page, status):
        # status is the stored index timestamp; None means never indexed
        return status is None or page.time > to_datetime(int(status))

    pages = (WikiPage(self.env, name)
             for name in WikiSystem(self.env).get_pages())
    return self._index(realm, pages, needs_update,
                       self.wiki_page_added, feedback, finish_fb)
def get_wikipages(self):
    """Yield a WikiPage object for every page name in self._env."""
    wiki = WikiSystem(self._env)
    for name in wiki.pages:
        yield WikiPage(self._env, name)
def _known_macros(self):
    """Return a mapping of macro name to description.

    Built-in macros are listed first (as '(built-in)') and take
    precedence; provider-supplied macros are added only when the name
    is not already known, so earlier providers win over later ones.
    """
    known = dict((name, '(built-in)') for name in self._builtin_macros)
    for provider in WikiSystem(self.env).macro_providers:
        for name in provider.get_macros() or ():
            if name not in known:
                known[name] = provider.get_macro_description(name)
    return known
def render_macro(self, req, name, content):
    """Render an alphabetical bullet list of links to all wiki pages,
    optionally restricted to the prefix given as macro content."""
    wiki = WikiSystem(self.env)
    prefix = content or None
    items = []
    for page in sorted(wiki.get_pages(prefix)):
        link = html.A(wiki.format_page_name(page),
                      href=req.href.wiki(page))
        items.append(html.LI(link))
    return html.UL(items)
def pages(self, db):
    """Return {field name: wiki text} for every help page found under
    the tooltip wiki prefix (the prefix is stripped from the keys)."""
    prefix = FieldTooltip._wiki_prefix
    cut = len(prefix)
    return dict((name[cut:], WikiPage(self.env, name, db=db).text)
                for name in WikiSystem(self.env).get_pages(prefix))
def sanitize_attrib(env, element):
    # Strip unsafe attributes from a Genshi element, unless the wiki is
    # configured to render unsafe content (then it passes through).
    if not WikiSystem(env).render_unsafe_content:
        # Rebuild the element and copy over only the attributes that
        # survive the sanitizer's first START event
        sanitized = getattr(tag, element.tag.localname)
        for k, data, pos in (Stream(element) | TracHTMLSanitizer()):
            sanitized.attrib = data[1]
            break # only look at START
        element = sanitized
    return element
def process_request(self, req):
    """Render the blog index from wiki pages whose names start with
    'Blog'.

    Each page becomes an event with title/body split from the text,
    a comment count (SimpleBlogComment macros, hidden from the
    excerpt), the original post info and optional 'updated.*' fields
    when the page has more than one version.  Entries are newest-first,
    capped at 20.  Returns the RSS template for ``format=rss``.
    """
    req.hdf['trac.href.blog'] = req.href.blog()
    entries = []
    for page_name in WikiSystem(self.env).get_pages(prefix='Blog'):
        page = WikiPage(self.env, page_name)
        title = page_name
        text = page.text
        match = title_split_match(page.text)
        if match:
            # First group is the title, second the body
            title = match.group(1)
            text = match.group(2)
        comments = text.count('[[SimpleBlogComment(')
        cutoff = text.find('[[SimpleBlogComment(')
        if cutoff >= 0:
            # Hide the comment macros from the rendered excerpt
            text = text[:cutoff].rstrip()
        description = wiki_to_html(text, self.env, req)
        original = self._get_original_post_info(page_name)
        event = {
            'href': self.env.href.wiki(page_name),
            'title': title,
            'description': description,
            'escaped': Markup.escape(unicode(description)),
            'date': format_datetime(original['time']),
            'rfcdate': http_date(original['time']),
            'author': original['author'],
            'comment': original['comment'],
            'comments': comments,
        }
        if page.version > 1:
            # The page was edited after the original post
            event['updated.version'] = page.version
            event['updated.date'] = format_datetime(page.time)
            event['updated.rfcdate'] = http_date(page.time)
            event['updated.author'] = page.author
            event['updated.comment'] = page.comment
        entries.append((original['time'], event))
    # Newest first, capped at 20 entries
    entries.sort()
    entries.reverse()
    max_count = 20
    if len(entries) > max_count:
        entries = entries[:max_count]
    events = []
    for date, event in entries:
        events.append(event)
    req.hdf['blog.events'] = events
    format = req.args.get('format')
    if format == 'rss':
        return 'blog_rss.cs', 'application/rss+xml'
    add_link(req, 'alternate', self.env.href.blog(format='rss'),
             'RSS Feed', 'application/rss+xml', 'rss')
    return 'blog.cs', None
def link_resolvers(self):
    """Lazily build and cache the {namespace: handler} mapping gathered
    from all wiki syntax providers; later providers win on conflicts."""
    if not self._link_resolvers:
        from trac.wiki.api import WikiSystem
        mapping = {}
        for provider in WikiSystem(self.env).syntax_providers:
            for namespace, handler in provider.get_link_resolvers() or []:
                mapping[namespace] = handler
        self._link_resolvers = mapping
    return self._link_resolvers
def sanitize_attrib(env, element):
    """Return *element* rebuilt with sanitized attributes, unless the
    wiki is configured to render unsafe content (then it passes
    through unchanged)."""
    if WikiSystem(env).render_unsafe_content:
        return element
    if arity(sanitizer.sanitize_attrs) == 1:
        clean = sanitizer.sanitize_attrs(element.attrib)
    else:
        # Trac 1.3.2+ also takes the tag
        clean = sanitizer.sanitize_attrs(element.tag, element.attrib)
    return Element(element.tag, **clean)
def _update_pages(self):
    """Recompute ``self.pages``: wiki page names long enough to be
    auto-linked, minus the excluded names, plus the names that should
    always be wikified."""
    candidates = WikiSystem(self.env).get_pages()
    self.pages = set(p for p in candidates
                     if len(p) >= self.minimum_length)
    excluded = set(p.strip() for p in (self.exclude or '') if p.strip())
    self.pages.difference_update(excluded)
    forced = set(p.strip() for p in (self.explicitly_wikify or '')
                 if p.strip())
    self.pages.update(forced)