def Main(opts): """ Cross your fingers and pray """ env = Environment(opts.envpath) from tractags.api import TagSystem tlist = opts.tags or split_tags(env.config.get('blog', 'default_tag', 'blog')) tags = TagSystem(env) req = Mock(perm=MockPerm()) blog = tags.query(req, ' '.join(tlist + ['realm:wiki'])) cnx = env.get_db_cnx() for resource, page_tags in list(blog): try: page = WikiPage(env, version=1, name=resource.id) _, publish_time, author, _, _ = page.get_history().next() if opts.deleteonly: page.delete() continue categories = ' '.join([t for t in page_tags if t not in tlist]) page = WikiPage(env, name=resource.id) for version, version_time, version_author, version_comment, \ _ in page.get_history(): # Currently the basename of the post url is used due to # http://trac-hacks.org/ticket/2956 #name = resource.id.replace('/', '_') name = resource.id # extract title from text: fulltext = page.text match = _title_split_match(fulltext) if match: title = match.group(1) fulltext = match.group(2) else: title = name body = fulltext print "Adding post %s, v%s: %s" % (name, version, title) insert_blog_post(cnx, name, version, title, body, publish_time, version_time, version_comment, version_author, author, categories) reparent_blog_attachments(env, resource.id, name) continue cnx.commit() if opts.delete: page.delete() continue except: env.log.debug("Error loading wiki page %s" % resource.id, exc_info=True) print "Failed to add post %s, v%s: %s" % (name, version, title) cnx.rollback() cnx.close() return 1 cnx.close() return 0
def Main(opts):
    """Migrate tagged wiki pages into blog posts.

    Duplicate revision of the migration entry point above: finds wiki
    pages tagged with the configured blog tag(s), inserts every wiki
    revision as a blog post via ``insert_blog_post``, re-parents the
    attachments, and optionally deletes the source pages.

    Returns 0 on success, 1 after the first failed page (with rollback).
    """
    env = Environment(opts.envpath)
    from tractags.api import TagSystem

    tlist = opts.tags or split_tags(
        env.config.get('blog', 'default_tag', 'blog'))
    tags = TagSystem(env)
    req = Mock(perm=MockPerm())
    # Restrict the tag query to the wiki realm.
    blog = tags.query(req, ' '.join(tlist + ['realm:wiki']))

    cnx = env.get_db_cnx()
    for resource, page_tags in list(blog):
        try:
            page = WikiPage(env, version=1, name=resource.id)
            # First history entry of version 1 = original publication info.
            _, publish_time, author, _, _ = page.get_history().next()
            if opts.deleteonly:
                page.delete()
                continue
            categories = ' '.join([t for t in page_tags if t not in tlist])
            page = WikiPage(env, name=resource.id)
            for version, version_time, version_author, version_comment, \
                    _ in page.get_history():
                # Currently the basename of the post url is used due to
                # http://trac-hacks.org/ticket/2956
                #name = resource.id.replace('/', '_')
                name = resource.id
                # extract title from text:
                fulltext = page.text
                match = _title_split_match(fulltext)
                if match:
                    title = match.group(1)
                    fulltext = match.group(2)
                else:
                    title = name
                body = fulltext
                print "Adding post %s, v%s: %s" % (name, version, title)
                insert_blog_post(cnx, name, version, title, body,
                                 publish_time, version_time, version_comment,
                                 version_author, author, categories)
                reparent_blog_attachments(env, resource.id, name)
                continue
            # Commit once per page, after all of its versions were inserted.
            cnx.commit()
            if opts.delete:
                page.delete()
            continue
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt;
            # and name/version/title may be unbound here if the failure
            # happened before the inner loop ran — TODO confirm and fix.
            env.log.debug("Error loading wiki page %s" % resource.id,
                          exc_info=True)
            print "Failed to add post %s, v%s: %s" % (name, version, title)
            cnx.rollback()
            cnx.close()
            return 1
    cnx.close()
    return 0
def get_default_version(self, name):
    """Return default viewable version number for the named page.

    Called when the wiki UI is rendering a page with no 'version=' in
    the HTTP request arguments.  In a real implementation this would
    look up which version is approved for viewing; this example picks
    a random version instead.

    :param name: wiki page name.
    :return: a version number, or None to fall back to the default
             "last edit wins" wiki behavior (e.g. when the page has no
             history at all).
    """
    import random
    import sys

    page = WikiPage(self.env, name, 1)
    history = page.get_history(all_versions=1)
    try:
        # First history entry carries the highest version number.
        (version, _time, _author, _comment, _ipnr) = history.next()
        # Twist: Wikipedia-style talk pages are *not* under workflow
        # control, so always show their latest version.
        if name.endswith("/Talk"):
            return version
        # Debug noise, then randomize the served version.
        print >>sys.stderr, "highest version", version
        version = random.randint(1, version)
        print >>sys.stderr, "random version", version
        return version
    except StopIteration:
        # No history available: returning None means the default wiki
        # "last edit wins" behavior will be used instead.
        return None
def getPageInfo(self, req, pagename, version=None):
    """ Returns information about the given page.

    :param req: the current request, used for permission checking.
    :param pagename: name of the wiki page.
    :param version: optional specific version; latest when None.
    :return: page info via ``self._page_info``, or None if the page
             does not exist.
    """
    page = WikiPage(self.env, pagename, version)
    # Consistency/security fix: the sibling implementations of this
    # method require WIKI_VIEW before exposing page metadata; without
    # this check a caller could probe pages it may not read.
    req.perm(page.resource).require('WIKI_VIEW')
    if page.exists:
        last_update = page.get_history().next()
        return self._page_info(page.name, last_update[1], last_update[2],
                               page.version)
def getPageInfo(self, req, pagename, version=None):
    """ Returns information about the given page.

    :param req: the current request, used for permission checking.
    :param pagename: name of the wiki page.
    :param version: optional specific version; latest when None.
    :return: page info via ``self._page_info`` (name, last-modified
             time, last author, version, comment), or None if the page
             does not exist.
    """
    page = WikiPage(self.env, pagename, version)
    # Refuse to leak metadata about pages the caller may not view.
    req.perm(page.resource).require("WIKI_VIEW")
    if page.exists:
        # First history entry: (version, time, author, ...).
        last_update = page.get_history().next()
        return self._page_info(page.name, last_update[1], last_update[2],
                               page.version, page.comment)
def getPageInfo(self, req, pagename, version=None):
    """ Returns information about the given page. """
    page = WikiPage(self.env, pagename, version)
    req.perm(page.resource).require('WIKI_VIEW')
    # Guard clause: missing pages yield no info (implicit None before).
    if not page.exists:
        return None
    # Head of the history iterator: (version, time, author, ...).
    newest = page.get_history().next()
    modified_when = newest[1]
    modified_by = newest[2]
    return self._page_info(page.name, modified_when, modified_by,
                           page.version, page.comment)
def _render_view(self, req, page):
    """Render the wiki page view.

    Registers format converters, builds breadcrumb/related links for
    missing pages, computes prev/up/next version links, runs the page
    manipulators over the text, and fills the template data for
    'wiki_view.html'.
    """
    version = page.resource.version

    # Add registered converters as <link rel="alternate"> entries.
    if page.exists:
        for conversion in Mimeview(self.env) \
                          .get_supported_conversions('text/x-trac-wiki'):
            conversion_href = req.href.wiki(page.name, version=version,
                                            format=conversion.key)
            add_link(req, 'alternate', conversion_href, conversion.name,
                     conversion.in_mimetype)

    data = self._page_data(req, page)
    if page.name == self.START_PAGE:
        # The start page gets no explicit title.
        data['title'] = ''

    ws = WikiSystem(self.env)
    context = web_context(req, page.resource)

    # For missing pages, offer candidate parent locations ("higher")
    # and similarly-named viewable pages ("related").
    higher, related = [], []
    if not page.exists:
        if 'WIKI_CREATE' not in req.perm(page.resource):
            raise ResourceNotFound(_("Page %(name)s not found",
                                     name=page.name))
        formatter = OneLinerFormatter(self.env, context)
        if '/' in page.name:
            parts = page.name.split('/')
            # Walk the hierarchy upwards, keeping the basename.
            for i in xrange(len(parts) - 2, -1, -1):
                name = '/'.join(parts[:i] + [parts[-1]])
                if not ws.has_page(name):
                    higher.append(ws._format_link(formatter, 'wiki',
                                                  '/' + name, name, False))
        else:
            name = page.name
        name = name.lower()
        related = [each for each in ws.pages
                   if name in each.lower()
                      and 'WIKI_VIEW' in req.perm(self.realm, each)]
        related.sort()
        related = [ws._format_link(formatter, 'wiki', '/' + each, each,
                                   False)
                   for each in related]

    latest_page = WikiPage(self.env, page.name)

    # Locate the neighboring versions of the one being displayed.
    prev_version = next_version = None
    if version:
        version = as_int(version, None)
        if version is not None:
            for hist in latest_page.get_history():
                v = hist[0]
                if v != version:
                    if v < version:
                        if not prev_version:
                            prev_version = v
                        break
                    else:
                        next_version = v

    prefix = self.PAGE_TEMPLATES_PREFIX
    templates = [template[len(prefix):]
                 for template in ws.get_pages(prefix)
                 if 'WIKI_VIEW' in req.perm(self.realm, template)]

    # -- prev/up/next links
    if prev_version:
        add_link(req, 'prev',
                 req.href.wiki(page.name, version=prev_version),
                 _("Version %(num)s", num=prev_version))

    parent = None
    if version:
        add_link(req, 'up', req.href.wiki(page.name, version=None),
                 _("View latest version"))
    elif '/' in page.name:
        parent = page.name[:page.name.rindex('/')]
        add_link(req, 'up', req.href.wiki(parent, version=None),
                 _("View parent page"))

    if next_version:
        add_link(req, 'next',
                 req.href.wiki(page.name, version=next_version),
                 _('Version %(num)s', num=next_version))

    # Add ctxtnav entries
    if version:
        prevnext_nav(req, _("Previous Version"), _("Next Version"),
                     _("View Latest Version"))
    else:
        if parent:
            add_ctxtnav(req, _('Up'), req.href.wiki(parent))
        self._wiki_ctxtnav(req, page)

    # Plugin content validation: let manipulators rewrite the text.
    fields = {'text': page.text}
    for manipulator in self.page_manipulators:
        manipulator.prepare_wiki_page(req, page, fields)
    text = fields.get('text', '')

    data.update({
        'context': context,
        'text': text,
        'latest_version': latest_page.version,
        'attachments': AttachmentModule(self.env).attachment_data(context),
        'start_page': self.START_PAGE,
        'default_template': self.DEFAULT_PAGE_TEMPLATE,
        'templates': templates,
        'version': version,
        'higher': higher,
        'related': related,
        'resourcepath_template': 'wiki_page_path.html',
        'fullwidth': req.session.get('wiki_fullwidth'),
    })
    add_script(req, 'common/js/wiki.js')
    return 'wiki_view.html', data
def _render_diff(self, req, page):
    """Render the diff between two versions of a wiki page.

    Normalizes old/new version numbers from the request, walks the
    page history to find the change metadata and neighboring versions,
    computes the text diff, and fills the template data for
    'wiki_diff.html'.
    """
    if not page.exists:
        raise TracError(_("Version %(num)s of page \"%(name)s\" does not "
                          "exist",
                          num=req.args.get('version'), name=page.name))

    old_version = req.args.getint('old_version')
    if old_version:
        if old_version == page.version:
            # Diffing a version against itself: fall back to "previous".
            old_version = None
        elif old_version > page.version:
            # FIXME: what about reverse diffs?
            # Swap so that `page` is the newer side of the diff.
            old_version = page.resource.version
            page = WikiPage(self.env, page.name, old_version)
            req.perm(page.resource).require('WIKI_VIEW')
    latest_page = WikiPage(self.env, page.name)
    req.perm(latest_page.resource).require('WIKI_VIEW')

    new_version = page.version

    date = author = comment = None
    num_changes = 0
    prev_version = next_version = None
    # History is newest-first; collect the change info for new_version
    # and locate old/prev/next versions relative to it.
    for version, t, a, c in latest_page.get_history():
        if version == new_version:
            date = t
            author = a or 'anonymous'
            comment = c or '--'
        else:
            if version < new_version:
                num_changes += 1
                if not prev_version:
                    prev_version = version
                if old_version is None or version == old_version:
                    old_version = version
                    break
            else:
                next_version = version

    if not old_version:
        old_version = 0
    old_page = WikiPage(self.env, page.name, old_version)
    req.perm(old_page.resource).require('WIKI_VIEW')

    # -- text diffs
    old_text = old_page.text.splitlines()
    new_text = page.text.splitlines()
    diff_data, changes = self._prepare_diff(req, page, old_text, new_text,
                                            old_version, new_version)

    # -- prev/up/next links
    if prev_version:
        add_link(req, 'prev',
                 req.href.wiki(page.name, action='diff',
                               version=prev_version),
                 _("Version %(num)s", num=prev_version))
    add_link(req, 'up', req.href.wiki(page.name, action='history'),
             _('Page history'))
    if next_version:
        add_link(req, 'next',
                 req.href.wiki(page.name, action='diff',
                               version=next_version),
                 _("Version %(num)s", num=next_version))

    data = self._page_data(req, page, 'diff')
    data.update({
        'change': {'date': date, 'author': author, 'comment': comment},
        'new_version': new_version,
        'old_version': old_version,
        'latest_version': latest_page.version,
        'num_changes': num_changes,
        'longcol': 'Version', 'shortcol': 'v',
        'changes': changes,
        'diff': diff_data,
        'can_edit_comment': 'WIKI_ADMIN' in req.perm(page.resource),
    })
    prevnext_nav(req, _("Previous Change"), _("Next Change"),
                 _("Wiki History"))
    return 'wiki_diff.html', data
def _render_view(self, req, page):
    """Render the wiki page view (older revision of this handler).

    Same shape as the newer `_render_view`: converters, missing-page
    links, prev/up/next version links, manipulators, template data for
    'wiki_view.html' — but with tuple-indexed conversions and an
    explicit int()/ValueError version parse.
    """
    version = page.resource.version

    # Add registered converters
    if page.exists:
        for conversion in Mimeview(self.env).get_supported_conversions(
                'text/x-trac-wiki'):
            conversion_href = req.href.wiki(page.name, version=version,
                                            format=conversion[0])
            # or...
            # NOTE(review): the next line immediately overwrites the
            # href built above — one of the two is dead code.
            conversion_href = get_resource_url(self.env, page.resource,
                                               req.href,
                                               format=conversion[0])
            add_link(req, 'alternate', conversion_href, conversion[1],
                     conversion[3])

    data = self._page_data(req, page)
    if page.name == 'WikiStart':
        data['title'] = ''

    ws = WikiSystem(self.env)
    context = web_context(req, page.resource)

    # For missing pages, offer candidate parent locations ("higher")
    # and similarly-named viewable pages ("related").
    higher, related = [], []
    if not page.exists:
        if 'WIKI_CREATE' not in req.perm(page.resource):
            raise ResourceNotFound(_('Page %(name)s not found',
                                     name=page.name))
        formatter = OneLinerFormatter(self.env, context)
        if '/' in page.name:
            parts = page.name.split('/')
            for i in range(len(parts) - 2, -1, -1):
                name = '/'.join(parts[:i] + [parts[-1]])
                if not ws.has_page(name):
                    higher.append(ws._format_link(formatter, 'wiki',
                                                  '/' + name, name, False))
        else:
            name = page.name
        name = name.lower()
        related = [each for each in ws.pages
                   if name in each.lower()
                      and 'WIKI_VIEW' in req.perm('wiki', each)]
        related.sort()
        related = [ws._format_link(formatter, 'wiki', '/' + each, each,
                                   False)
                   for each in related]

    latest_page = WikiPage(self.env, page.name, version=None)
    req.perm(latest_page.resource).require('WIKI_VIEW')

    # Locate the neighboring versions of the one being displayed.
    prev_version = next_version = None
    if version:
        try:
            version = int(version)
            for hist in latest_page.get_history():
                v = hist[0]
                if v != version:
                    if v < version:
                        if not prev_version:
                            prev_version = v
                        break
                    else:
                        next_version = v
        except ValueError:
            version = None

    prefix = self.PAGE_TEMPLATES_PREFIX
    templates = [template[len(prefix):]
                 for template in ws.get_pages(prefix)
                 if 'WIKI_VIEW' in req.perm('wiki', template)]

    # -- prev/up/next links
    if prev_version:
        add_link(req, 'prev',
                 req.href.wiki(page.name, version=prev_version),
                 _('Version %(num)s', num=prev_version))

    parent = None
    if version:
        add_link(req, 'up', req.href.wiki(page.name, version=None),
                 _('View latest version'))
    elif '/' in page.name:
        parent = page.name[:page.name.rindex('/')]
        add_link(req, 'up', req.href.wiki(parent, version=None),
                 _("View parent page"))

    if next_version:
        add_link(req, 'next',
                 req.href.wiki(page.name, version=next_version),
                 _('Version %(num)s', num=next_version))

    # Add ctxtnav entries
    if version:
        prevnext_nav(req, _('Previous Version'), _('Next Version'),
                     _('View Latest Version'))
    else:
        if parent:
            add_ctxtnav(req, _('Up'), req.href.wiki(parent))
        self._wiki_ctxtnav(req, page)

    # Plugin content validation: let manipulators rewrite the text.
    fields = {'text': page.text}
    for manipulator in self.page_manipulators:
        manipulator.prepare_wiki_page(req, page, fields)
    text = fields.get('text', '')

    data.update({
        'context': context,
        'text': text,
        'latest_version': latest_page.version,
        'attachments': AttachmentModule(self.env).attachment_data(context),
        'default_template': self.DEFAULT_PAGE_TEMPLATE,
        'templates': templates,
        'version': version,
        'higher': higher,
        'related': related,
        'resourcepath_template': 'wiki_page_path.html',
    })
    add_script(req, 'common/js/folding.js')
    return 'wiki_view.html', data, None
def _render_diff(self, req, page):
    """Render the diff between two wiki page versions (older revision).

    Like the newer `_render_diff` but parses old_version with a plain
    int(), tracks the editor's IP (`ipnr`), and returns a 3-tuple.
    """
    if not page.exists:
        raise TracError(_('Version %(num)s of page "%(name)s" does not '
                          'exist',
                          num=req.args.get('version'), name=page.name))

    old_version = req.args.get('old_version')
    if old_version:
        # NOTE(review): int() here can raise ValueError on a malformed
        # query argument — the newer revision uses req.args.getint().
        old_version = int(old_version)
        if old_version == page.version:
            old_version = None
        elif old_version > page.version:
            # FIXME: what about reverse diffs?
            # Swap so that `page` is the newer side of the diff.
            old_version = page.resource.version
            page = WikiPage(self.env, page.name, version=old_version)
            req.perm(page.resource).require('WIKI_VIEW')
    latest_page = WikiPage(self.env, page.name, version=None)
    req.perm(latest_page.resource).require('WIKI_VIEW')

    new_version = int(page.version)

    date = author = comment = ipnr = None
    num_changes = 0
    prev_version = next_version = None
    # History is newest-first; collect the change info for new_version
    # and locate old/prev/next versions relative to it.
    for version, t, a, c, i in latest_page.get_history():
        if version == new_version:
            date = t
            author = a or 'anonymous'
            comment = c or '--'
            ipnr = i or ''
        else:
            if version < new_version:
                num_changes += 1
                if not prev_version:
                    prev_version = version
                if old_version is None or version == old_version:
                    old_version = version
                    break
            else:
                next_version = version

    if not old_version:
        old_version = 0
    old_page = WikiPage(self.env, page.name, old_version)
    req.perm(old_page.resource).require('WIKI_VIEW')

    # -- text diffs
    old_text = old_page.text.splitlines()
    new_text = page.text.splitlines()
    diff_data, changes = self._prepare_diff(req, page, old_text, new_text,
                                            old_version, new_version)

    # -- prev/up/next links
    if prev_version:
        add_link(req, 'prev',
                 req.href.wiki(page.name, action='diff',
                               version=prev_version),
                 _('Version %(num)s', num=prev_version))
    add_link(req, 'up', req.href.wiki(page.name, action='history'),
             _('Page history'))
    if next_version:
        add_link(req, 'next',
                 req.href.wiki(page.name, action='diff',
                               version=next_version),
                 _('Version %(num)s', num=next_version))

    data = self._page_data(req, page, 'diff')
    data.update({
        'change': {'date': date, 'author': author, 'ipnr': ipnr,
                   'comment': comment},
        'new_version': new_version,
        'old_version': old_version,
        'latest_version': latest_page.version,
        'num_changes': num_changes,
        'longcol': 'Version', 'shortcol': 'v',
        'changes': changes,
        'diff': diff_data,
    })
    prevnext_nav(req, _('Previous Change'), _('Next Change'),
                 _('Wiki History'))
    return 'wiki_diff.html', data, None
def _render_view(self, req, page):
    """Render the wiki page view (oldest revision of this handler).

    Registers format converters, computes prev/up/next version links
    and templates, and fills the template data for 'wiki_view.html'.
    Unlike the later revisions it has no missing-page link suggestions
    and no page-manipulator pass.
    """
    version = page.resource.version

    # Add registered converters
    if page.exists:
        for conversion in Mimeview(self.env).get_supported_conversions(
                'text/x-trac-wiki'):
            conversion_href = req.href.wiki(page.name, version=version,
                                            format=conversion[0])
            # or...
            # NOTE(review): the next line immediately overwrites the
            # href built above — one of the two is dead code.
            conversion_href = get_resource_url(self.env, page.resource,
                                               req.href,
                                               format=conversion[0])
            add_link(req, 'alternate', conversion_href, conversion[1],
                     conversion[3])

    data = self._page_data(req, page)
    if page.name == 'WikiStart':
        data['title'] = ''

    if not page.exists:
        if 'WIKI_CREATE' not in req.perm(page.resource):
            raise ResourceNotFound(_('Page %(name)s not found',
                                     name=page.name))

    latest_page = WikiPage(self.env, page.name, version=None)
    req.perm(latest_page.resource).require('WIKI_VIEW')

    # Locate the neighboring versions of the one being displayed.
    prev_version = next_version = None
    if version:
        try:
            version = int(version)
            for hist in latest_page.get_history():
                v = hist[0]
                if v != version:
                    if v < version:
                        if not prev_version:
                            prev_version = v
                        break
                    else:
                        next_version = v
        except ValueError:
            version = None

    prefix = self.PAGE_TEMPLATES_PREFIX
    templates = [template[len(prefix):]
                 for template in WikiSystem(self.env).get_pages(prefix)
                 if 'WIKI_VIEW' in req.perm('wiki', template)]

    # -- prev/up/next links
    if prev_version:
        add_link(req, 'prev',
                 req.href.wiki(page.name, version=prev_version),
                 _('Version %(num)s', num=prev_version))

    parent = None
    if version:
        add_link(req, 'up', req.href.wiki(page.name, version=None),
                 _('View latest version'))
    elif '/' in page.name:
        parent = page.name[:page.name.rindex('/')]
        add_link(req, 'up', req.href.wiki(parent, version=None),
                 _("View parent page"))

    if next_version:
        add_link(req, 'next',
                 req.href.wiki(page.name, version=next_version),
                 _('Version %(num)s', num=next_version))

    # Add ctxtnav entries
    if version:
        prevnext_nav(req, _('Version'), _('View Latest Version'))
        add_ctxtnav(req, _('Last Change'),
                    req.href.wiki(page.name, action='diff',
                                  version=page.version))
    else:
        if parent:
            add_ctxtnav(req, _('Up'), req.href.wiki(parent))
        self._wiki_ctxtnav(req, page)

    context = Context.from_request(req, page.resource)
    data.update({
        'context': context,
        'latest_version': latest_page.version,
        'attachments': AttachmentModule(self.env).attachment_data(context),
        'default_template': self.DEFAULT_PAGE_TEMPLATE,
        'templates': templates,
        'version': version
    })
    return 'wiki_view.html', data, None
def _render_diff(self, req, db, page):
    """Render a wiki page diff into the HDF (oldest, ClearSilver-era
    revision of this handler).

    Fills req.hdf['wiki'] with change metadata and
    req.hdf['wiki.diff'] with the line diff between the selected old
    version and `page`.
    """
    req.perm.assert_permission('WIKI_VIEW')

    if not page.exists:
        raise TracError("Version %s of page %s does not exist" %
                        (req.args.get('version'), page.name))

    add_stylesheet(req, 'common/css/diff.css')
    # Localized page title ("changes"); runtime string, left as-is.
    self._set_title(req, page, u'变化')

    # Ask web spiders to not index old versions
    req.hdf['html.norobots'] = 1

    old_version = req.args.get('old_version')
    if old_version:
        # NOTE(review): int() can raise ValueError on a malformed
        # query argument — TODO confirm upstream validation.
        old_version = int(old_version)
        if old_version == page.version:
            old_version = None
        elif old_version > page.version:
            # FIXME: what about reverse diffs?
            # Swap so that `page` is the newer side of the diff.
            old_version, page = page.version, \
                                WikiPage(self.env, page.name, old_version)

    latest_page = WikiPage(self.env, page.name)
    new_version = int(page.version)

    info = {
        'version': new_version,
        'latest_version': latest_page.version,
        'history_href': req.href.wiki(page.name, action='history')
    }

    num_changes = 0
    old_page = None
    prev_version = next_version = None
    # History is newest-first; collect the change info for new_version
    # and locate the old/prev/next versions relative to it.
    for version, t, author, comment, ipnr in latest_page.get_history():
        if version == new_version:
            if t:
                info['time'] = format_datetime(t)
                info['time_delta'] = pretty_timedelta(t)
            info['author'] = author or 'anonymous'
            info['comment'] = wiki_to_html(comment or '--',
                                           self.env, req, db)
            info['ipnr'] = ipnr or ''
        else:
            if version < new_version:
                num_changes += 1
                if not prev_version:
                    prev_version = version
                if (old_version and version == old_version) or \
                        not old_version:
                    old_page = WikiPage(self.env, page.name, version)
                    info['num_changes'] = num_changes
                    info['old_version'] = version
                    break
            else:
                next_version = version
    req.hdf['wiki'] = info

    # -- prev/next links
    if prev_version:
        add_link(req, 'prev',
                 req.href.wiki(page.name, action='diff',
                               version=prev_version),
                 'Version %d' % prev_version)
    if next_version:
        add_link(req, 'next',
                 req.href.wiki(page.name, action='diff',
                               version=next_version),
                 'Version %d' % next_version)

    # -- text diffs
    diff_style, diff_options = get_diff_options(req)

    oldtext = old_page and old_page.text.splitlines() or []
    newtext = page.text.splitlines()
    # Context lines default to 3; '-U<n>' in the diff options overrides,
    # and a negative value means "full context".
    context = 3
    for option in diff_options:
        if option.startswith('-U'):
            context = int(option[2:])
            break
    if context < 0:
        context = None
    changes = hdf_diff(oldtext, newtext, context=context,
                       ignore_blank_lines='-B' in diff_options,
                       ignore_case='-i' in diff_options,
                       ignore_space_changes='-b' in diff_options)
    req.hdf['wiki.diff'] = changes
def _generate_blog(self, req, *args, **kwargs):
    """Extract the blog pages and fill the HDF.

    *args is a list of tags to use to limit the blog scope
    **kwargs are any aditional keyword arguments that are needed

    For every wiki page carrying the blog tag(s): read its first and
    latest history entries, filter by poster and time range, strip the
    comment section, truncate the preview, and store a display dict in
    `entries` keyed by post time.  Finally fills req.hdf with the
    sorted entries, the "new blog" link, the calendar, and the
    optional BlogHeader page.
    """
    tallies = {}
    tags = TagEngine(self.env).tagspace.wiki
    try:
        union = kwargs['union']
    except KeyError:
        union = False
    # Formatting
    read_post = "[wiki:%s Read Post]"
    entries = {}
    if not len(args):
        tlist = [self.env.config.get('blog', 'default_tag', 'blog')]
    else:
        tlist = args
    if union:
        blog = tags.get_tagged_names(tlist, operation='union')
    else:
        blog = tags.get_tagged_names(tlist, operation='intersection')
    # The page hosting the macro itself must not appear as a post.
    macropage = req.args.get('page', None)

    poststart, postend, default_times = self._get_time_range(req, **kwargs)
    mark_updated = self._choose_value('mark_updated', req, kwargs,
                                      convert=bool_val)
    if not mark_updated and (not isinstance(mark_updated, bool)):
        mark_updated = bool_val(self.env.config.get('blog',
                                                    'mark_updated',
                                                    True))
    macro_bl = self.env.config.get('blog', 'macro_blacklist',
                                   '').split(',')
    macro_bl = [name.strip() for name in macro_bl if name.strip()]
    macro_bl.append('BlogShow')

    # Get the email addresses of all known users and validate the "poster"
    # BlogShow optional argument at the same time (avoids looping the user
    # list twice).
    is_poster = None
    limit_poster = self._choose_value('poster', req, kwargs, convert=None)
    email_map = {}
    for username, name, email in self.env.get_known_users():
        if email:
            email_map[username] = email
        if limit_poster != None:
            if username == limit_poster:
                is_poster = username

    num_posts = self._choose_value('num_posts', req, kwargs, convert=int)
    if num_posts and default_times:
        # With an explicit post count and default times, accept any time.
        poststart = sys.maxint
        postend = 0

    for blog_entry in blog:
        if blog_entry == macropage:
            continue
        try:
            page = WikiPage(self.env, version=1, name=blog_entry)
            # Version 1 history entry = original publication info.
            version, post_time, author, comment, ipnr = page.get_history(
                ).next()
            # if we're limiting by poster, do so now so that the calendar
            # only shows the number of entries the specific poster made.
            if is_poster != None:
                if is_poster != author:
                    continue
            self._add_to_tallies(tallies, post_time, blog_entry)
            # Latest history entry = last modification info.
            page = WikiPage(self.env, name=blog_entry)
            version, modified, author, comment, ipnr = page.get_history(
                ).next()
        except:
            # Deliberate best-effort: a broken page is logged and skipped
            # rather than breaking the whole blog listing.
            self.log.debug("Error loading wiki page %s" % blog_entry,
                           exc_info=True)
            continue
        if poststart >= post_time >= postend:
            time_format = self.env.config.get('blog', 'date_format') \
                          or '%x %X'
            timeStr = format_datetime(post_time, format=time_format)
            fulltext = page.text
            # remove comments in blog view:
            del_comments = re.compile('==== Comment.*\Z', re.DOTALL)
            fulltext = del_comments.sub('', fulltext)
            # remove the [[AddComment...]] tag, otherwise it would appeare
            # more than one and crew up the blog view:
            del_addcomment = re.compile('\[\[AddComment.*\Z', re.DOTALL)
            fulltext = del_addcomment.sub('', fulltext)
            # limit length of preview:
            post_size = self._choose_value('post_size', req, kwargs, int)
            if not post_size and (not isinstance(post_size, int)):
                post_size = int(self.env.config.get('blog', 'post_size',
                                                    1024))
            text = self._trim_page(fulltext, blog_entry, post_size)
            pagetags = [x for x in tags.get_name_tags(blog_entry)
                        if x not in tlist]
            # At most three tags are displayed per entry.
            tagtags = []
            for i, t in enumerate(pagetags[:3]):
                d = {
                    'link' : t,
                    'name' : t,
                    'last' : i == (len(pagetags[:3]) - 1),
                }
                tagtags.append(d)
                continue
            # extract title from text:
            match = _title_split_match(fulltext)
            if match:
                title = match.group(1)
                fulltext = match.group(2)
            else:
                title = blog_entry
            html_text = wiki_to_html(fulltext, self.env, req)
            rss_text = Markup.escape(to_unicode(html_text))
            data = {
                'name'      : blog_entry,
                'title'     : title,
                'href'      : self.env.href.wiki(blog_entry),
                'wiki_link' : wiki_to_oneliner(read_post % blog_entry,
                                               self.env),
                'time'      : timeStr,
                'date'      : http_date(post_time),
#                'date'      : http_date(page.time),
                'author'    : author,
                'wiki_text' : wiki_to_nofloat_html(text, self.env, req,
                                           macro_blacklist=macro_bl),
                'rss_text'  : rss_text,
                'comment'   : wiki_to_oneliner(comment, self.env),
                'tags'      : {
                    'present' : len(pagetags),
                    'tags'    : tagtags,
                    'more'    : len(pagetags) > 3 or 0,
                },
            }
            if author:
                # For RSS, author must be an email address
                if author.find('@') != -1:
                    data['author.email'] = author
                elif email_map.has_key(author):
                    data['author.email'] = email_map[author]
            if (modified != post_time) and mark_updated:
                data['modified'] = 1
                mod_str = format_datetime(modified, format=time_format)
                data['mod_time'] = mod_str
            entries[post_time] = data
        continue

    # Sort entries newest-first and apply the post-count limit.
    tlist = entries.keys()
    tlist.sort()
    tlist.reverse()
    if num_posts and (num_posts <= len(tlist)):
        tlist = tlist[:num_posts]
    if tlist:
        # Mark the final entry so the template can suppress a separator.
        entries[tlist[-1]]['last'] = 1
    req.hdf['blog.entries'] = [entries[x] for x in tlist]
    bloglink = self.env.config.get('blog', 'new_blog_link',
                                   'New Blog Post')
    req.hdf['blog.newblog'] = bloglink

    hidecal = self._choose_value('hidecal', req, kwargs)
    if not hidecal:
        self._generate_calendar(req, tallies)
    req.hdf['blog.hidecal'] = hidecal
    # Insert /wiki/BlogHeader into /blog.  If the page does not exist,
    # this'll be a no-op
    blog_header = WikiPage(self.env, name='BlogHeader').text
    req.hdf['blog.header'] = Mimeview(self.env).render(
        req, 'text/x-trac-wiki', blog_header)
def _generate_blog(self, req, args, kwargs):
    """Extract the blog pages and fill the HDF.

    *args is a list of tags to use to limit the blog scope
    **kwargs are any aditional keyword arguments that are needed

    NOTE(review): this revision appears to be unfinished work in
    progress — several names are used before/without being defined
    (see the inline notes).  Kept as-is for the record.
    """
    tallies = {}
    parms = self._get_display_params(req, kwargs)
    tagged_pages, tlist = self._initialize_tags(args, kwargs)
    poststart, postend, default_times = self._get_time_range(parms, req,
                                                             kwargs)
    # The blog can be displayed in various formats depending on the values
    # of the parameters.  The two main methods/displays are:
    #  * Fixed number of posts, ie. always show the last 5 posts.
    #  * All posts in a given date range, ie. all posts for a given month.
    # Issues with the first method is that all posts have to be read, and
    # then sorted by date before the selection can be made, as we are not
    # guaranteed any specific order from the tag retrieval.
    #
    # Issues with the second are likewise.  However, with the second, while
    # we still need to read in all of the posts, we can drop those that are
    # out of range in the process, so no extra sorting need be done.
    #
    # Create a dictionary of page names keyed by the post date.  Create a
    # sorted (decending) list of post date timestamps.
    for p in tagged_pages:
        page = WikiPage(self.env, version=1, name=p)
        version, post_time, author, comment, ipnr = page.get_history(
            ).next()
        # NOTE(review): `posts` is never initialized before this —
        # NameError at runtime; presumably `posts = {}` is missing above.
        posts[post_time] = p
        # We also take time to update our tallies
        self._tally(tallies, post_time, p)
        continue
    post_times = posts.keys()
    post_times.sort()
    post_times.reverse()
    # Slice the list of post times down to those posts that fall in the
    # selected time frame and/or number of posts.
    if default_times and parms['num_posts']:
        post_times = post_times[:parms['num_posts']]
    else:
        i = 0
        maxindex = len(post_times)
        while (i < maxindex) and (post_times[i] < poststart):
            i += 1
        start_index = i
        while (i < maxindex) and (post_times[i] <= postend):
            i += 1
        stop_index = i
        post_times = post_times[start_index:stop_index]
        if parms['num_posts'] and (parms['num_posts'] <= len(post_times)):
            post_times = post_times[:parms['num_posts']]
    # Now that we have the list of pages that we are going to use, what now?
    # We need the following information:
    #- * formatted post time
    #- * truncated(?) version of page to display
    #  * list of tags to display in "tagged under"
    #    * link name (tag)
    #    * tag name (tag)
    #    * last tag indicator
    #- * page name
    #- * link to wiki page (read_post)
    #- * author
    #- * comment (?)
    #  * whether or not tags are present
    #  * whether or not more tags are available
    #- * whether or not the post has been updated
    #- * update time
    entries = {}
    for post in post_times:
        page = WikiPage(self.env, name=posts[post])
        version, modified, author, comment, ipnr = page.get_history(
            ).next()
        # Truncate the post text if necessary and format for display
        # NOTE(review): `_trin_page` is likely a typo for `_trim_page`.
        post_text = wiki_to_nofloat_html(
            self._trin_page(page.text), self.env, req,
            macro_blacklist=parms['macro_blacklist'])
        # Format the page time for display
        # NOTE(review): `post_time` is read before assignment inside this
        # loop — presumably `post` (the loop key) was intended.
        post_time = format_datetime(post_time,
                                    format=parms['date_format'])
        # Create the link to the full post text
        # NOTE(review): trailing comma makes this a 1-tuple — TODO confirm.
        read_post_link = wiki_to_oneliner(
            parms['read_post'] % posts[post], self.env),
        # Set whether or not the page has been modified
        post_modified = ((modified != post) and parms['mark_updated'])
        # Create post modified string.  If {{{post_modified}}} is false,
        # this string is simply ignored.
        modified_notice = format_datetime(modified,
                                          format=parms['date_format'])
        # NOTE(review): tags_present / page_tags / more_tags are not
        # defined anywhere in this revision.
        data = {
            'name' : posts[post],
            'wiki_text' : post_text,
            'time' : post_time,
            'author' : author,  # need to make sure this is the orig.
            'comment' : comment,
            'modified' : post_modified,
            'mod_time' : modified_notice,
            'wiki_link' : read_post_link,
            'tags' : {
                'present' : tags_present,
                'tags' : page_tags,
                'more' : more_tags,
            },
        }
        entries[post_time] = data
    # NOTE(review): `blog` is undefined in this revision; this second
    # loop builds `data` dicts that are discarded each iteration.
    for blog_entry in blog:
        pagetags = [x for x in self.tags.get_name_tags(blog_entry)
                    if x not in tlist]
        tagtags = []
        for i, t in enumerate(pagetags[:3]):
            d = {
                'link' : t,
                'name' : t,
                'last' : i == 2,
            }
            tagtags.append(d)
            continue
        data = {
            'tags' : {
                'present' : len(pagetags),
                'tags' : tagtags,
                'more' : len(pagetags) > 3 or 0,
            },
        }
        continue
    # mark the last post so that we don't display a line after the last post
    if post_times:
        entries[post_times[-1]]['last'] = 1
    # fill the HDF with the blog entries
    req.hdf['blog.entries'] = [entries[x] for x in post_times]
    # set link name for blog posts
    bloglink = self.env.config.get('blog', 'new_blog_link',
                                   'New Blog Post')
    req.hdf['blog.newblog'] = bloglink
    # generate calendar
    hidecal = self._choose_value('hidecal', req, kwargs)
    if not hidecal:
        self._generate_calendar(req, tallies)
    req.hdf['blog.hidecal'] = hidecal
    pass
def _generate_blog(self, req, *args, **kwargs):
    """Extract the blog pages and fill the HDF.

    Collects all wiki pages tagged as blog posts, filters them by
    time range / poster / post count, renders each post preview, and
    stores the result in ``req.hdf`` for the ClearSilver template.

    *args is a list of tags to use to limit the blog scope
    **kwargs are any aditional keyword arguments that are needed
    """
    # Tally of posts per day, used later to build the sidebar calendar.
    tallies = {}
    tags = TagEngine(self.env).tagspace.wiki
    # 'union' selects pages matching ANY tag; default is intersection (ALL tags).
    try:
        union = kwargs['union']
    except KeyError:
        union = False
    # Formatting
    read_post = "[wiki:%s Read Post]"
    entries = {}
    # No explicit tags given -> fall back to the configured default tag.
    if not len(args):
        tlist = [self.env.config.get('blog', 'default_tag', 'blog')]
    else:
        tlist = args
    if union:
        blog = tags.get_tagged_names(tlist, operation='union')
    else:
        blog = tags.get_tagged_names(tlist, operation='intersection')
    # The page hosting the macro itself must not show up as a post.
    macropage = req.args.get('page', None)
    poststart, postend, default_times = self._get_time_range(req, **kwargs)
    mark_updated = self._choose_value('mark_updated', req, kwargs,
                                      convert=bool_val)
    # _choose_value may return a falsy non-bool when unset; only then read
    # the config default.
    if not mark_updated and (not isinstance(mark_updated, bool)):
        mark_updated = bool_val(
            self.env.config.get('blog', 'mark_updated', True))
    macro_bl = self.env.config.get('blog', 'macro_blacklist', '').split(',')
    macro_bl = [name.strip() for name in macro_bl if name.strip()]
    # BlogShow must never render recursively inside a post preview.
    macro_bl.append('BlogShow')
    # Get the email addresses of all known users and validate the "poster"
    # BlogShow optional argument at the same time (avoids looping the user
    # list twice).
    is_poster = None
    limit_poster = self._choose_value('poster', req, kwargs, convert=None)
    email_map = {}
    for username, name, email in self.env.get_known_users():
        if email:
            email_map[username] = email
        if limit_poster != None:
            if username == limit_poster:
                is_poster = username
    num_posts = self._choose_value('num_posts', req, kwargs, convert=int)
    # When limiting by count with no explicit time range, widen the window
    # to "all time" so the newest num_posts are picked regardless of date.
    if num_posts and default_times:
        poststart = sys.maxint
        postend = 0
    for blog_entry in blog:
        if blog_entry == macropage:
            continue
        try:
            # Version 1 gives the original publication time and author.
            page = WikiPage(self.env, version=1, name=blog_entry)
            version, post_time, author, comment, ipnr = page.get_history(
                ).next()
            # if we're limiting by poster, do so now so that the calendar
            # only shows the number of entries the specific poster made.
            if is_poster != None:
                if is_poster != author:
                    continue
            self._add_to_tallies(tallies, post_time, blog_entry)
            # Latest version gives the last-modified time and current text.
            page = WikiPage(self.env, name=blog_entry)
            version, modified, author, comment, ipnr = page.get_history(
                ).next()
        except:
            # Best effort: a broken page is logged and skipped, never fatal.
            self.log.debug("Error loading wiki page %s" % blog_entry,
                           exc_info=True)
            continue
        # poststart is the newer bound (larger timestamp), postend the older.
        if poststart >= post_time >= postend:
            time_format = self.env.config.get('blog', 'date_format') \
                or '%x %X'
            timeStr = format_datetime(post_time, format=time_format)
            fulltext = page.text
            # remove comments in blog view:
            del_comments = re.compile('==== Comment.*\Z', re.DOTALL)
            fulltext = del_comments.sub('', fulltext)
            # remove the [[AddComment...]] tag, otherwise it would appeare
            # more than one and crew up the blog view:
            del_addcomment = re.compile('\[\[AddComment.*\Z', re.DOTALL)
            fulltext = del_addcomment.sub('', fulltext)
            # limit length of preview:
            post_size = self._choose_value('post_size', req, kwargs, int)
            if not post_size and (not isinstance(post_size, int)):
                post_size = int(
                    self.env.config.get('blog', 'post_size', 1024))
            text = self._trim_page(fulltext, blog_entry, post_size)
            # Tags shown under the post, minus the tags used to select it.
            pagetags = [
                x for x in tags.get_name_tags(blog_entry) if x not in tlist
            ]
            tagtags = []
            # Only the first three tags are displayed; 'last' flags the
            # final one so the template can suppress its separator.
            for i, t in enumerate(pagetags[:3]):
                d = {
                    'link': t,
                    'name': t,
                    'last': i == (len(pagetags[:3]) - 1),
                }
                tagtags.append(d)
                continue
            # extract title from text:
            match = _title_split_match(fulltext)
            if match:
                title = match.group(1)
                fulltext = match.group(2)
            else:
                # No explicit title in the text -> use the page name.
                title = blog_entry
            html_text = wiki_to_html(fulltext, self.env, req)
            rss_text = Markup.escape(to_unicode(html_text))
            data = {
                'name': blog_entry,
                'title': title,
                'href': self.env.href.wiki(blog_entry),
                'wiki_link': wiki_to_oneliner(read_post % blog_entry,
                                              self.env),
                'time': timeStr,
                'date': http_date(post_time),
                # 'date' : http_date(page.time),
                'author': author,
                'wiki_text': wiki_to_nofloat_html(
                    text, self.env, req, macro_blacklist=macro_bl),
                'rss_text': rss_text,
                'comment': wiki_to_oneliner(comment, self.env),
                'tags': {
                    'present': len(pagetags),
                    'tags': tagtags,
                    'more': len(pagetags) > 3 or 0,
                },
            }
            if author:
                # For RSS, author must be an email address
                if author.find('@') != -1:
                    data['author.email'] = author
                elif email_map.has_key(author):
                    data['author.email'] = email_map[author]
            # Flag posts edited after publication (if mark_updated is on).
            if (modified != post_time) and mark_updated:
                data['modified'] = 1
                mod_str = format_datetime(modified, format=time_format)
                data['mod_time'] = mod_str
            # Keyed by publication time for reverse-chronological sorting.
            entries[post_time] = data
        continue
    # Sort posts newest-first; tlist is reused here as the sorted key list.
    tlist = entries.keys()
    tlist.sort()
    tlist.reverse()
    if num_posts and (num_posts <= len(tlist)):
        tlist = tlist[:num_posts]
    # Mark the last displayed post so the template omits a trailing rule.
    if tlist:
        entries[tlist[-1]]['last'] = 1
    req.hdf['blog.entries'] = [entries[x] for x in tlist]
    bloglink = self.env.config.get('blog', 'new_blog_link', 'New Blog Post')
    req.hdf['blog.newblog'] = bloglink
    hidecal = self._choose_value('hidecal', req, kwargs)
    if not hidecal:
        self._generate_calendar(req, tallies)
    req.hdf['blog.hidecal'] = hidecal
    # Insert /wiki/BlogHeader into /blog. If the page does not exist,
    # this'll be a no-op
    blog_header = WikiPage(self.env, name='BlogHeader').text
    req.hdf['blog.header'] = Mimeview(self.env).render(
        req, 'text/x-trac-wiki', blog_header)
def _render_diff(self, req, db, page):
    """Render the diff between two versions of a wiki page into the HDF.

    ``page`` is the newer version being viewed; the older side is taken
    from the ``old_version`` request argument, or defaults to the
    immediately preceding version found while walking the page history.
    Raises TracError if the requested version does not exist.
    """
    req.perm.assert_permission('WIKI_VIEW')
    if not page.exists:
        raise TracError("Version %s of page %s does not exist" %
                        (req.args.get('version'), page.name))
    add_stylesheet(req, 'common/css/diff.css')
    # NOTE: localized title string (Chinese for "Changes") — kept verbatim.
    self._set_title(req, page, u'变化')
    # Ask web spiders to not index old versions
    req.hdf['html.norobots'] = 1
    old_version = req.args.get('old_version')
    if old_version:
        old_version = int(old_version)
        if old_version == page.version:
            # Diffing a version against itself is meaningless; ignore it.
            old_version = None
        elif old_version > page.version:
            # FIXME: what about reverse diffs?
            # Swap so that 'page' is always the newer of the two versions.
            old_version, page = page.version, \
                WikiPage(self.env, page.name, old_version)
    latest_page = WikiPage(self.env, page.name)
    new_version = int(page.version)
    info = {
        'version': new_version,
        'latest_version': latest_page.version,
        'history_href': req.href.wiki(page.name, action='history')
    }
    num_changes = 0
    old_page = None
    prev_version = next_version = None
    # Walk history (newest first) to find metadata for the shown version,
    # the old version to diff against, and the prev/next navigation links.
    for version, t, author, comment, ipnr in latest_page.get_history():
        if version == new_version:
            if t:
                info['time'] = format_datetime(t)
                info['time_delta'] = pretty_timedelta(t)
            info['author'] = author or 'anonymous'
            info['comment'] = wiki_to_html(comment or '--', self.env, req,
                                           db)
            info['ipnr'] = ipnr or ''
        else:
            if version < new_version:
                num_changes += 1
                if not prev_version:
                    prev_version = version
                # Stop at the explicitly requested old version, or at the
                # first (i.e. nearest) older version when none was given.
                if (old_version and version == old_version) or \
                        not old_version:
                    old_page = WikiPage(self.env, page.name, version)
                    info['num_changes'] = num_changes
                    info['old_version'] = version
                    break
            else:
                # Versions newer than the one shown; remembers the closest.
                next_version = version
    req.hdf['wiki'] = info
    # -- prev/next links
    if prev_version:
        add_link(
            req, 'prev',
            req.href.wiki(page.name, action='diff', version=prev_version),
            'Version %d' % prev_version)
    if next_version:
        add_link(
            req, 'next',
            req.href.wiki(page.name, action='diff', version=next_version),
            'Version %d' % next_version)
    # -- text diffs
    diff_style, diff_options = get_diff_options(req)
    # No older version found (e.g. version 1): diff against empty text.
    oldtext = old_page and old_page.text.splitlines() or []
    newtext = page.text.splitlines()
    # Default unified-diff context; overridable via a '-U<n>' diff option.
    context = 3
    for option in diff_options:
        if option.startswith('-U'):
            context = int(option[2:])
            break
    if context < 0:
        # Negative context means "show the whole file".
        context = None
    changes = hdf_diff(oldtext, newtext, context=context,
                       ignore_blank_lines='-B' in diff_options,
                       ignore_case='-i' in diff_options,
                       ignore_space_changes='-b' in diff_options)
    req.hdf['wiki.diff'] = changes