def _consider_talk_link(self, links):
    """
    Add the implicit article <-> talk cross-link to links.

    If talk pages are enabled on this wiki, a talk page always links back
    to its article page, and an article page links to its talk page when
    that talk page exists.  Returns the (possibly extended) links list,
    which is also modified in place.
    """
    if not self.request.config.talk_pages:
        return links
    # lower-cased copies for case-insensitive duplicate checks
    lower_links = [link.lower() for link in links]
    from Sycamore.Page import Page
    pagename = self.key
    page = Page(pagename, self.request)
    if page.isTalkPage():
        # talk page: always link back to the associated article page
        article_page = Page(wikiutil.talk_to_article_pagename(pagename),
                            self.request)
        article_pagename = article_page.proper_name()
        if article_pagename.lower() not in lower_links:
            links.append(article_pagename)
    else:
        talk_pagename = wikiutil.article_to_talk_pagename(pagename)
        talk_page = Page(talk_pagename, self.request)
        # add dependency so that editing/creating the talk page
        # has an affect on the article page's links
        dependency(pagename, talk_pagename.lower(), self.request)
        if talk_page.exists():
            talk_pagename = talk_page.proper_name()
            if talk_pagename.lower() not in lower_links:
                links.append(talk_pagename)
    return links
def execute(macro, args, formatter):
    """
    Macro: render link(s) to randomly chosen page(s).

    args is the number of links wanted (defaults to 1 on bad input).
    Returns a single page link for one page, or an HTML bullet list of
    links for more than one.  User pages (Users/...) and talk pages
    (.../Talk) are never chosen.
    """
    if not formatter:
        formatter = macro.formatter

    # get number of wanted links
    try:
        links = max(int(args), 1)
    except StandardError:
        links = 1

    # select the pages from the page list
    random_list = wikiutil.getRandomPages(macro.request)
    pages = []
    while len(pages) < links and random_list:
        pagename = random.choice(random_list)
        # BUG FIX: remove each tried name so the loop terminates even when
        # too few pages qualify, and so no page is picked twice.
        random_list.remove(pagename)
        page = Page(pagename, macro.request)
        if (macro.request.user.may.read(page) and page.exists() and
            not page.isRedirect()):
            proper_name = page.proper_name()
            if proper_name[0:6] != 'Users/' and proper_name[-5:] != '/Talk':
                pages.append(page)

    # BUG FIX: guard against no qualifying pages at all
    # (pages[0] used to raise IndexError here).
    if not pages:
        return ''

    # return a single page link
    if links == 1:
        return pages[0].link_to()

    # return a list of page links
    pages.sort()
    result = [macro.formatter.bullet_list(1)]
    for page in pages:
        result.append("%s%s%s" % (macro.formatter.listitem(1),
                                  page.link_to(),
                                  macro.formatter.listitem(0)))
    result.append(macro.formatter.bullet_list(0))
    return ''.join(result)
def putFile(request, dict, thumbnail=False, do_delete=False, temporary=False,
            ticket=None, permanent=False):
    """
    Puts the file (found in dict) into the database.

    dict is a dictionary with possible keys: filename, filecontent,
    uploaded_time, uploaded_by, pagename, uploaded_by_ip, xsize, ysize,
    deleted_time, deleted_by, deleted_by_ip.

    thumbnail -- operate on the thumbnails table rather than files.
    do_delete -- delete the file/thumbnail instead of adding it.
    temporary -- only set the memory cache (keyed on ticket); skip the db.
    ticket    -- cache-key component for temporary uploads.
    permanent -- on delete, also nuke all backed-up old versions.
    """
    from Sycamore.wikiutil import mc_quote, isImage
    from Sycamore.Page import Page
    from Sycamore import caching
    from Sycamore.action.Files import get_filedict

    def set_cache_for_file():
        """
        Sets the memory cache for the new file.
        """
        if not config.memcache:
            return
        if not do_delete:
            if not thumbnail:
                table = 'files'
            else:
                table = 'thumbnails'
            if not temporary:
                key = "%s:%s,%s" % (table, mc_quote(dict['filename']),
                                    mc_quote(dict['pagename'].lower()))
            else:
                # temporary uploads are keyed on the upload ticket
                key = "%s,%s,%s" % (table, mc_quote(dict['filename']), ticket)
            image_obj = (raw_image, uploaded_time)
            request.mc.set(key, image_obj)
        else:
            if not thumbnail:
                key = "files:%s,%s" % (mc_quote(dict['filename']),
                                       mc_quote(dict['pagename'].lower()))
                request.mc.set(key, False)
            if is_image and thumbnail:
                key = "thumbnails:%s,%s" % (mc_quote(dict['filename']),
                                            mc_quote(dict['pagename'].lower()))
                request.mc.set(key, False)
        # set new file dict (skipped when we only replaced an existing
        # file, since the set of attached files did not change)
        if not replaced_image:
            get_filedict(request, dict['pagename'], fresh=True, set=True)

    def rebuild_page_cache():
        """
        Rebuilds the page cache.
        """
        if not request.generating_cache and not request.previewing_page:
            from Sycamore import caching
            from Sycamore.Page import Page
            page = Page(dict['pagename'], request)
            if page.exists():
                page.buildCache()

    def handle_file_add():
        """
        Insert the file, backing up any same-named file it replaces.

        Returns True if an existing file was replaced, False otherwise.
        """
        request.cursor.execute(
            """SELECT name from files
               where name=%(filename)s and
                     attached_to_pagename=%(pagename)s and
                     wiki_id=%(wiki_id)s""", dict)
        exists = request.cursor.fetchone()
        if exists:
            # backup file, then remove it
            request.cursor.execute(
                """INSERT into oldFiles (name, file, uploaded_time,
                       uploaded_by, attached_to_pagename, deleted_time,
                       deleted_by, uploaded_by_ip, deleted_by_ip,
                       attached_to_pagename_propercased, wiki_id)
                   values (%(filename)s,
                       (select file from files where name=%(filename)s and
                        attached_to_pagename=%(pagename)s and
                        wiki_id=%(wiki_id)s),
                       (select uploaded_time from files where
                        name=%(filename)s and
                        attached_to_pagename=%(pagename)s and
                        wiki_id=%(wiki_id)s),
                       (select uploaded_by from files where
                        name=%(filename)s and
                        attached_to_pagename=%(pagename)s and
                        wiki_id=%(wiki_id)s),
                       %(pagename)s, %(uploaded_time)s, %(uploaded_by)s,
                       (select uploaded_by_ip from files where
                        name=%(filename)s and
                        attached_to_pagename=%(pagename)s and
                        wiki_id=%(wiki_id)s),
                       %(uploaded_by_ip)s, %(pagename_propercased)s,
                       %(wiki_id)s)""", dict, isWrite=True)
            if is_image:
                request.cursor.execute(
                    """INSERT into oldImageInfo (name, attached_to_pagename,
                           xsize, ysize, uploaded_time, wiki_id)
                       values (%(filename)s, %(pagename)s,
                           (select xsize from imageInfo where
                            name=%(filename)s and
                            attached_to_pagename=%(pagename)s and
                            wiki_id=%(wiki_id)s),
                           (select ysize from imageInfo where
                            name=%(filename)s and
                            attached_to_pagename=%(pagename)s and
                            wiki_id=%(wiki_id)s),
                           (select uploaded_time from files where
                            name=%(filename)s and
                            attached_to_pagename=%(pagename)s and
                            wiki_id=%(wiki_id)s),
                           %(wiki_id)s)""", dict, isWrite=True)
                request.cursor.execute(
                    """DELETE from imageInfo where name=%(filename)s and
                       attached_to_pagename=%(pagename)s and
                       wiki_id=%(wiki_id)s""", dict, isWrite=True)
            request.cursor.execute(
                """DELETE from files where name=%(filename)s and
                   attached_to_pagename=%(pagename)s and
                   wiki_id=%(wiki_id)s""", dict, isWrite=True)
        request.cursor.execute(
            """INSERT into files (name, file, uploaded_time, uploaded_by,
                   attached_to_pagename, uploaded_by_ip,
                   attached_to_pagename_propercased, wiki_id)
               values (%(filename)s, %(filecontent)s, %(uploaded_time)s,
                   %(uploaded_by)s, %(pagename)s, %(uploaded_by_ip)s,
                   %(pagename_propercased)s, %(wiki_id)s)""",
            dict, isWrite=True)
        if is_image:
            request.cursor.execute(
                """INSERT into imageInfo (name, attached_to_pagename,
                       xsize, ysize, wiki_id)
                   values (%(filename)s, %(pagename)s, %(xsize)s,
                       %(ysize)s, %(wiki_id)s)""", dict, isWrite=True)
        caching.updateRecentChanges(page)
        # BUG FIX: the original did "replaced_image = True" here, but in
        # Python 2 that only creates a *local* binding (no nonlocal), so
        # the enclosing flag read by set_cache_for_file never changed.
        # Report the replacement to the caller via the return value.
        return exists is not None

    def handle_thumbnail_add():
        """
        Insert or refresh the stored thumbnail for this file.
        """
        request.cursor.execute(
            """SELECT name from thumbnails
               where name=%(filename)s and
                     attached_to_pagename=%(pagename)s and
                     wiki_id=%(wiki_id)s""", dict)
        exists = request.cursor.fetchone()
        if exists:
            request.cursor.execute(
                """UPDATE thumbnails set xsize=%(x)s, ysize=%(y)s,
                       image=%(filecontent)s,
                       last_modified=%(uploaded_time)s
                   where name=%(filename)s and
                         attached_to_pagename=%(pagename)s and
                         wiki_id=%(wiki_id)s""", dict, isWrite=True)
        else:
            request.cursor.execute(
                """INSERT into thumbnails (xsize, ysize, name, image,
                       last_modified, attached_to_pagename, wiki_id)
                   values (%(x)s, %(y)s, %(filename)s, %(filecontent)s,
                       %(uploaded_time)s, %(pagename)s, %(wiki_id)s)""",
                dict, isWrite=True)

    def handle_file_delete():
        """
        Delete the file; back it up first, or nuke history if permanent.
        """
        request.cursor.execute(
            """SELECT name from files
               where name=%(filename)s and
                     attached_to_pagename=%(pagename)s and
                     wiki_id=%(wiki_id)s""", dict)
        has_file = request.cursor.fetchone()
        if has_file:
            if not permanent:
                # backup file
                request.cursor.execute(
                    """INSERT into oldFiles (name, attached_to_pagename,
                           file, uploaded_by, uploaded_time, deleted_time,
                           deleted_by, uploaded_by_ip, deleted_by_ip,
                           attached_to_pagename_propercased, wiki_id)
                       values (%(filename)s, %(pagename)s,
                           (select file from files where name=%(filename)s
                            and attached_to_pagename=%(pagename)s and
                            wiki_id=%(wiki_id)s),
                           (select uploaded_by from files where
                            name=%(filename)s and
                            attached_to_pagename=%(pagename)s and
                            wiki_id=%(wiki_id)s),
                           (select uploaded_time from files where
                            name=%(filename)s and
                            attached_to_pagename=%(pagename)s and
                            wiki_id=%(wiki_id)s),
                           %(deleted_time)s, %(deleted_by)s,
                           (select uploaded_by_ip from files where
                            name=%(filename)s and
                            attached_to_pagename=%(pagename)s and
                            wiki_id=%(wiki_id)s),
                           %(deleted_by_ip)s,
                           (select attached_to_pagename_propercased from
                            files where name=%(filename)s and
                            attached_to_pagename=%(pagename)s and
                            wiki_id=%(wiki_id)s),
                           %(wiki_id)s)""", dict, isWrite=True)
            else:
                # nuke all old cached versions of the file
                caching.deleteAllFileInfo(dict['filename'],
                                          dict['pagename'], request)
                # nuke all old versions
                request.cursor.execute(
                    """DELETE from oldFiles where name=%(filename)s and
                       attached_to_pagename=%(pagename)s and
                       wiki_id=%(wiki_id)s""", dict, isWrite=True)
            if is_image:
                if not permanent:
                    # backup image info
                    request.cursor.execute(
                        """INSERT into oldImageInfo (name,
                               attached_to_pagename, xsize, ysize,
                               uploaded_time, wiki_id)
                           values (%(filename)s, %(pagename)s,
                               (select xsize from imageInfo where
                                name=%(filename)s and
                                attached_to_pagename=%(pagename)s and
                                wiki_id=%(wiki_id)s),
                               (select ysize from imageInfo where
                                name=%(filename)s and
                                attached_to_pagename=%(pagename)s and
                                wiki_id=%(wiki_id)s),
                               (select uploaded_time from files where
                                name=%(filename)s and
                                attached_to_pagename=%(pagename)s and
                                wiki_id=%(wiki_id)s),
                               %(wiki_id)s)""", dict, isWrite=True)
                else:
                    # nuke all old versions
                    request.cursor.execute(
                        """DELETE from oldImageInfo where
                           name=%(filename)s and
                           attached_to_pagename=%(pagename)s and
                           wiki_id=%(wiki_id)s""", dict, isWrite=True)
                # delete image info
                request.cursor.execute(
                    """DELETE from imageInfo where name=%(filename)s and
                       attached_to_pagename=%(pagename)s and
                       wiki_id=%(wiki_id)s""", dict, isWrite=True)
            # delete file
            request.cursor.execute(
                """DELETE from files where name=%(filename)s and
                   attached_to_pagename=%(pagename)s and
                   wiki_id=%(wiki_id)s""", dict, isWrite=True)
            caching.updateRecentChanges(page)

    def handle_thumbnail_delete():
        """
        delete thumbnail.
        """
        request.cursor.execute(
            """DELETE from thumbnails where name=%(filename)s and
               attached_to_pagename=%(pagename)s and
               wiki_id=%(wiki_id)s""", dict, isWrite=True)

    # prep for insert of binary data
    if dict.has_key('filecontent'):
        raw_image = dict['filecontent']
        uploaded_time = dict['uploaded_time']
        dict['filecontent'] = dbapi.Binary(raw_image)

    page = Page(dict['pagename'], request)
    dict['pagename_propercased'] = page.proper_name()
    dict['pagename'] = dict['pagename'].lower()
    dict['wiki_id'] = request.config.wiki_id
    replaced_image = False
    is_image = isImage(dict['filename'])

    if temporary:
        # we don't update the database in this case
        set_cache_for_file()
        rebuild_page_cache()
        return

    if not thumbnail and not do_delete:
        # may flip replaced_image, which set_cache_for_file reads below
        replaced_image = handle_file_add()
    elif thumbnail and not do_delete:
        handle_thumbnail_add()
    elif do_delete:
        if not thumbnail:
            handle_file_delete()
        else:
            handle_thumbnail_delete()

    set_cache_for_file()
    rebuild_page_cache()
def testUrl(self):
    """Test's Page.url().

    Checks relative and absolute urls, with and without query strings,
    both after switching the request's wiki and via the wiki_name
    keyword on Page.
    """
    from Sycamore.Page import Page
    from Sycamore import farm

    def random_query():
        # build a random query string of 0-10 'key=value' pairs,
        # '&'-joined, with the original leading '?' preserved
        query = '?'
        the_range = random.randint(0, 10)
        for i in xrange(0, the_range):
            if i < (the_range - 1):
                amperstand = '&'
            else:
                amperstand = ''
            query += ('%s=%s%s' %
                      (make_random_string(50, alphanum_only=True),
                       make_random_string(50, alphanum_only=True),
                       amperstand))
        return query

    def random_page(wiki_name=None):
        # a Page with a random name, plus its url-encoded proper name
        pagename = make_random_string(MAX_PAGENAME_LENGTH)
        if wiki_name is None:
            page = Page(pagename, self.request)
        else:
            page = Page(pagename, self.request, wiki_name=wiki_name)
        return page, wikiutil.quoteWikiname(page.proper_name())

    list_of_wikis = self._get_list_of_wikis()

    # relative, w/o query string
    for trial in xrange(0, 200):
        page, encoded = random_page()
        self.assertEqual('/%s' % encoded, page.url())

    # relative, w/ query string
    for trial in xrange(0, 200):
        query = random_query()
        page, encoded = random_page()
        self.assertEqual('/%s?%s' % (encoded, query),
                         page.url(querystr=query))

    original_wiki_name = self.request.config.wiki_name

    # absolute url, switched request
    for wiki_trial in xrange(0, 10):
        self.request.switch_wiki(random.choice(list_of_wikis))
        # invariant within this wiki; hoisted out of the trial loops
        farm_url = farm.getWikiURL(self.request.config.wiki_name,
                                   self.request)
        for trial in xrange(0, 200):
            page, encoded = random_page()
            self.assertEqual('%s%s' % (farm_url, encoded),
                             page.url(relative=False))
        for trial in xrange(0, 200):
            query = random_query()
            page, encoded = random_page()
            self.assertEqual('%s%s?%s' % (farm_url, encoded, query),
                             page.url(querystr=query, relative=False))
    self.request.switch_wiki(original_wiki_name)

    # absolute url, non-switched request
    for wiki_trial in xrange(0, 10):
        wiki_name = random.choice(list_of_wikis)
        farm_url = farm.getWikiURL(wiki_name, self.request)
        for trial in xrange(0, 200):
            page, encoded = random_page(wiki_name=wiki_name)
            self.assertEqual('%s%s' % (farm_url, encoded),
                             page.url(relative=False))
        for trial in xrange(0, 200):
            query = random_query()
            page, encoded = random_page(wiki_name=wiki_name)
            self.assertEqual('%s%s?%s' % (farm_url, encoded, query),
                             page.url(querystr=query, relative=False))
def execute(macro, args, formatter=None):
    """
    Macro: include the contents of another page in this page.

    args is 'pagename' optionally followed by ,"heading", left/right
    alignment, and/or a NN% width.  Returns the included page rendered
    inside a <div class="includedPage">, or an error message string.
    """
    if not formatter:
        if hasattr(macro.parser, 'formatter'):
            formatter = macro.parser.formatter
        else:
            formatter = macro.formatter
    _ = macro.request.getText
    # remember inline-edit state so it can be restored at the end;
    # the included content itself is rendered non-editable
    inline_edit_state = formatter.inline_edit
    formatter.inline_edit = False
    # included page will already have paragraphs. no need to print another.
    macro.parser.inhibit_p = 1
    if line_has_just_macro(macro, args, formatter):
        macro.parser.inhibit_br = 2
    request = macro.request
    # parse and check arguments
    if not args:
        return (_sysmsg % ('error',
                           _('You did not give a pagename of a page to '
                             'include!')))
    # prepare including page
    result = []
    this_page = formatter.page
    # if we're in a paragraph, let's close it.
    if macro.formatter.in_p:
        result.append(macro.formatter.paragraph(0))
    # _macroInclude_pagelist tracks include nesting depth per page name
    if not hasattr(this_page, '_macroInclude_pagelist'):
        this_page._macroInclude_pagelist = {}
    # name1 matches 'pagename, <more args>'; name2 a bare pagename
    re_args = re.match('('
                       '('
                       '(?P<name1>.+?)(\s*,\s*)((".*")|(left|right)|([0-9]{1,2}%)))|'
                       '(?P<name2>.+))', args)
    if not re_args:
        return (_sysmsg % ('error', _('Invalid arguments to Include.')))
    have_more_args = re_args.group('name1')
    page_name = re_args.group('name1') or re_args.group('name2')
    if have_more_args:
        args = args[re_args.end('name1'):]
    else:
        args = ''
    # optional quoted heading to print above the included content
    re_args = re.search('"(?P<heading>.*)"', args)
    if re_args:
        heading = re_args.group('heading')
    else:
        heading = None
    if heading:
        # strip the quoted heading (and its quotes) out of args
        before_heading = args[:re_args.start('heading')-1].strip()
        after_heading = args[re_args.end('heading')+1:].strip()
        args = before_heading + after_heading[1:]
    # remaining comma-separated args: alignment and/or width
    args_elements = args.split(',')
    align = None
    was_given_width = False
    width = '50%'
    for arg in args_elements:
        arg = arg.strip()
        if arg == 'left' or arg == 'right':
            align = arg
        elif arg.endswith('%'):
            # normalize e.g. '05%' -> '5%'; ignore non-numeric widths
            try:
                arg = str(int(arg[:-1])) + '%'
            except:
                continue
            width = arg
            was_given_width = True
    inc_name = wikiutil.AbsPageName(this_page.page_name, page_name)
    inc_page = Page(inc_name, macro.request)
    if not macro.request.user.may.read(inc_page):
        return ''
    # a page directly including itself would loop forever
    if this_page.page_name.lower() == inc_name.lower():
        result.append('<p><strong class="error">'
                      'Recursive include of "%s" forbidden</strong></p>' %
                      inc_name)
        return ''.join(result)
    # check for "from" and "to" arguments (allowing partial includes)
    body = inc_page.get_raw_body(fresh=True) + '\n'
    edit_icon = ''
    # do headings
    level = 1
    if heading:
        result.append(formatter.heading(level, heading, action_link="edit",
                                        link_to_heading=True,
                                        pagename=inc_page.proper_name(),
                                        backto=this_page.page_name))
    # bail out if nested includes go too deep
    if this_page._macroInclude_pagelist.has_key(inc_name):
        if (this_page._macroInclude_pagelist[inc_name] >
            caching.MAX_DEPENDENCY_DEPTH):
            return '<em>Maximum include depth exceeded.</em>'
    # set or increment include marker
    this_page._macroInclude_pagelist[inc_name] = \
        this_page._macroInclude_pagelist.get(inc_name, 0) + 1
    # format the included page
    pi_format = config.default_markup or "wiki"
    Parser = wikiutil.importPlugin("parser", pi_format, "Parser")
    raw_text = inc_page.get_raw_body(fresh=True)
    formatter.setPage(inc_page)
    parser = Parser(raw_text, formatter.request)
    parser.print_first_p = 0  # don't print two <p>'s
    # note that our page now depends on the content of the included page
    if formatter.name == 'text_python':
        # this means we're in the caching formatter
        caching.dependency(this_page.page_name, inc_name.lower(),
                           macro.request)
    # output formatted: temporarily redirect request output into a
    # buffer, render the included page, then restore everything
    buffer = cStringIO.StringIO()
    formatter.request.redirect(buffer)
    parser.format(formatter, inline_edit_default_state=False)
    formatter.setPage(this_page)
    formatter.request.redirect()
    text = buffer.getvalue().decode('utf-8')
    buffer.close()
    result.append(text)
    # decrement or remove include marker
    if this_page._macroInclude_pagelist[inc_name] > 1:
        this_page._macroInclude_pagelist[inc_name] -= 1
    else:
        del this_page._macroInclude_pagelist[inc_name]
    # wrap the rendered include in a styled div
    attrs = ''
    if align:
        attrs += (' style="width: %s; float: %s; clear: %s;" ' %
                  (width, align, align))
    elif was_given_width:
        attrs += ' style="width: %s;' % width
    attrs += ' class="includedPage"'
    include_page = '<div%s>%s</div>' % (attrs, ''.join(result))
    ## turn back on inline editing ability
    parser.formatter.inline_edit = inline_edit_state
    formatter.inline_edit = inline_edit_state
    # return include text
    return include_page