def execute(macro, args, formatter=None):
    """RandomQuote macro: show random quote line(s) from a wiki page.

    ``args`` is ``"<pagename>, <count>"``: the page holding the quotes
    and how many to display.  Quote lines are wiki list items; with
    count > 1 the raw ``' *'`` lines are kept (so they render as a
    list), with count == 1 a single ``'* '`` line has its bullet
    stripped before rendering.

    Returns the rendered HTML (unicode), or a highlighted notice when
    the page contains no quote lines.
    """
    if not formatter:
        formatter = macro.formatter
    _ = macro.request.getText

    # Split the macro arguments into page name and item count.
    re_args = re.search(r'(?P<caption>.+),\s(?P<the_rest>.*)', args)
    if re_args is None:
        # Malformed arguments (no ", " separator): render nothing
        # instead of crashing with AttributeError on the failed match.
        return ''
    pagename = re_args.group('caption')
    items = re_args.group('the_rest')
    page = Page(pagename, macro.request)

    try:
        links = max(int(items), 1)
    except (ValueError, TypeError):
        # Count wasn't numeric; fall back to a single quote.
        links = 1

    raw = page.get_raw_body(fresh=macro.request.set_cache)
    if not macro.request.user.may.read(page):
        raw = ""  # hide content the user is not allowed to read

    # This selects lines looking like a list item.
    # !!! TODO: make multi-line quotes possible
    #     (optionally split by "----" or something)
    quotes = raw.splitlines()

    if links > 1:
        # Keep the raw list-item lines so they render as a wiki list.
        quotes = [line for line in quotes if line.startswith(' *')]
        random.shuffle(quotes)
        # Truncate in one slice instead of popping one element per loop.
        quotes = quotes[:links]
        quote = ''.join(line + '\n' for line in quotes)
    else:
        # Keep only '* ' items, with the bullet prefix stripped.
        stripped = [line.strip() for line in quotes]
        quotes = [line[2:] for line in stripped if line.startswith('* ')]
        quote = random.choice(quotes) if quotes else ''

    quote = _render_quote(macro, page, quote)

    if not quotes:
        return (macro.formatter.highlight(1)
                + _('No quotes on %(pagename)s.') % {'pagename': pagename}
                + macro.formatter.highlight(0))
    return quote.decode(config.charset)


def _render_quote(macro, page, body):
    """Run *body* (wiki markup) through the wiki renderer; return HTML.

    Output is captured by temporarily redirecting the request's output
    stream into a StringIO buffer, mirroring how other macros render
    page content out-of-band.
    """
    page.set_raw_body(body, 1)
    out = cStringIO.StringIO()
    macro.request.redirect(out)
    page.send_page(content_only=1,
                   content_id="randomquote_%s" %
                       wikiutil.quoteWikiname(page.page_name))
    rendered = out.getvalue()
    macro.request.redirect()  # restore the normal output stream
    return rendered
def loadLanguage(request, lang):
    """Load the text dictionary for a specific language.

    Note that while ISO language codes use a dash, like 'en-us', our
    language files use '_' like 'en_us' because they are saved as
    Python source files.

    Returns the ``text`` dict from the ``Sycamore.i18n.<lang>`` module
    (byte strings in the file's own charset -- see the unicode TODO
    below).
    """
    from Sycamore.util import pysupport
    lang_module = "Sycamore.i18n." + filename(lang)
    texts = pysupport.importName(lang_module, "text")
    # ``meta`` carries the language file's metadata (e.g. its encoding);
    # kept for the pending unicode conversion:
    #     for t in texts: texts[t] = texts[t].decode(meta['encoding'])
    meta = pysupport.importName(lang_module, "meta")

    # FIXME: earlier attempts to push the i18n texts through the wiki
    # parser/formatter (so inline HTML could be replaced by wiki markup
    # and survive future HTML-standard changes) produced double-escaped
    # entities ("&amp;amp;..."), possibly in parser.wiki._do_ent_repl.
    # Both experimental implementations were permanently disabled with
    # ``if 0:`` guards and have been removed as dead code.
    #
    # TODO: caching -- for CGI, performance will suck without it;
    # pickle the texts dict into the caching area.
    return texts
# Migration-script tail: copies wiki group pages into wikiacl.Group
# objects, then annotates the old group pages as superseded.
# NOTE(review): the first four statements use ``groupname`` which is not
# defined in this fragment -- they appear to be the body/tail of a loop
# over ``defined_user_groups`` earlier in the file; confirm indentation
# against the full source.
group = wikiacl.Group(groupname, req)
groupdict = get_group_members(groupname, req)
group.update(groupdict)
group.save()

# Map the configured admin group page onto the built-in 'Admin' group.
print " ", admin_group, "->", 'Admin'
group = wikiacl.Group('Admin', req)
groupdict = get_group_members(admin_group, req)
group.update(groupdict)
group.save()

# Map the configured banned group page onto the built-in 'Banned' group.
print " ", banned_group, "->", 'Banned'
group = wikiacl.Group('Banned', req)
groupdict = get_group_members(banned_group, req)
group.update(groupdict)
group.save()

# note on group page that this is not how it's defined any more
for groupname in defined_user_groups + [admin_group, banned_group]:
    p = Page(groupname, req)
    if p.exists():
        new_body = p.get_raw_body() + '\n\n' + group_changed_message
        p.set_raw_body(new_body)
        # Write the notice straight into both page tables (current and
        # historical revision) via parameterized queries, then rebuild
        # the page cache so the change is visible immediately.
        req.cursor.execute("UPDATE curPages set text=%(new_body)s where name=%(pagename)s and wiki_id=%(wiki_id)s", {'new_body':new_body, 'pagename':p.page_name, 'wiki_id':req.config.wiki_id}, isWrite=True)
        req.cursor.execute("UPDATE allPages set text=%(new_body)s where name=%(pagename)s and editTime=%(mtime)s and wiki_id=%(wiki_id)s", {'new_body':new_body, 'pagename':p.page_name, 'mtime':p.mtime(), 'wiki_id':req.config.wiki_id}, isWrite=True)
        p.buildCache()
req.db_disconnect()
print "..Done!"