def macro(self, content, arguments, page_url, alternative):
    """Render links to randomly chosen wiki items.

    The first macro argument, when given, is the number of items to pick
    (default 1). Items the current user may not read are skipped (detected
    via AccessDeniedError from Item.create). Returns a moin_page span of
    comma-separated links, or None when no readable item was found.
    """
    # Number of random items requested by the macro invocation.
    wanted = int(arguments[0]) if arguments else 1

    # Full list of candidate item names under the wiki root.
    root = Item(self.request, u'')
    candidates = [entry.name for entry in root.list_items()]

    # Draw without replacement until we have enough readable items or
    # the candidate pool is exhausted.
    chosen = []
    while len(chosen) < wanted and candidates:
        name = random.choice(candidates)
        candidates.remove(name)
        try:
            # Existence/permission probe; raises if the user may not read it.
            Item.create(name)
        except AccessDeniedError:
            continue
        chosen.append(name)

    if not chosen:
        return

    chosen.sort()

    # Build "<a>, <a>, ..." — separator only between entries.
    result = moin_page.span()
    for index, name in enumerate(chosen):
        if index:
            result.append(", ")
        target = unicode(Iri(scheme=u'wiki', authority=u'', path=u'/' + name))
        result.append(moin_page.a(attrib={xlink.href: target}, children=[name]))
    return result
def recurse(self, elem, page_href):
    """Walk the element tree and expand ``xi:include`` elements in place.

    :param elem: the element (sub)tree currently being processed.
    :param page_href: Iri of the page this subtree belongs to; updated
        whenever a ``page_href`` attribute announces a new page.
    :returns: a replacement element when ``elem`` was an ``xi:include``
        that got expanded, otherwise None (children are replaced in place).
    :raises ValueError: for include targets with a non-local authority or
        a scheme other than ``wiki`` / ``wiki.local``.
    """
    # Track which page we are inside so recursive includes can be detected
    # via self.stack; push None when the page did not change.
    page_href_new = elem.get(self.tag_page_href)
    if page_href_new:
        page_href_new = Iri(page_href_new)
        if page_href_new != page_href:
            page_href = page_href_new
            self.stack.append(page_href)
        else:
            self.stack.append(None)
    else:
        self.stack.append(None)
    try:
        if elem.tag == self.tag_xi_include:
            href = elem.get(self.tag_xi_href)
            xpointer = elem.get(self.tag_xi_xpointer)

            # Parameters that may be supplied through the xpointer's
            # moin-namespaced include() expression.
            xp_include_pages = None
            xp_include_sort = None
            xp_include_items = None
            xp_include_skipitems = None
            xp_include_heading = None
            xp_include_level = None

            if xpointer:
                xp = XPointer(xpointer)
                xp_include = None
                xp_namespaces = {}
                for entry in xp:
                    uri = None
                    name = entry.name.split(':', 1)
                    if len(name) > 1:
                        prefix, name = name
                        # False (not None) marks an unknown prefix so the
                        # 'xmlns' branch below is not taken by accident.
                        uri = xp_namespaces.get(prefix, False)
                    else:
                        name = name[0]
                    if uri is None and name == 'xmlns':
                        d_prefix, d_uri = entry.data.split('=', 1)
                        xp_namespaces[d_prefix] = d_uri
                    elif uri == moin_page.namespace and name == 'include':
                        xp_include = XPointer(entry.data)

                if xp_include:
                    for entry in xp_include:
                        name, data = entry.name, entry.data
                        if name == 'pages':
                            xp_include_pages = data
                        elif name == 'sort':
                            xp_include_sort = data
                        elif name == 'items':
                            xp_include_items = int(data)
                        elif name == 'skipitems':
                            xp_include_skipitems = int(data)
                        elif name == 'heading':
                            xp_include_heading = data
                        elif name == 'level':
                            xp_include_level = data

            # BUG FIX: 'pages' was unbound when neither href nor a pages
            # xpointer was given, raising NameError below; default to empty.
            pages = ()

            if href:
                # We have a single page to include.
                href = Iri(href)
                link = Iri(scheme='wiki', authority='')
                if href.scheme == 'wiki':
                    if href.authority:
                        raise ValueError(
                            "can't handle xinclude for non-local authority"
                        )
                    else:
                        path = href.path[1:]
                elif href.scheme == 'wiki.local':
                    page = page_href
                    path = href.path
                    if path[0] == '':
                        # /subitem — relative to the current page
                        tmp = page.path[1:]
                        tmp.extend(path[1:])
                        path = tmp
                    elif path[0] == '..':
                        # ../sisteritem — relative to the parent
                        path = page.path[1:] + path[1:]
                else:
                    raise ValueError(
                        "can't handle xinclude for schemes other than wiki or wiki.local"
                    )

                link.path = path
                page = Item.create(unicode(path))
                pages = ((page, link), )
            elif xp_include_pages:
                # We have a regex of pages to include.
                from MoinMoin.search.term import NameFn
                inc_match = re.compile(xp_include_pages)
                root_item = Item(name=u'')
                pagelist = [
                    item.name
                    for item in root_item.list_items(NameFn(inc_match))
                ]
                pagelist.sort()
                if xp_include_sort == 'descending':
                    pagelist.reverse()
                if xp_include_skipitems is not None:
                    pagelist = pagelist[xp_include_skipitems:]
                if xp_include_items is not None:
                    # BUG FIX: keep at most xp_include_items entries; the old
                    # slice [xp_include_items + 1:] discarded them instead.
                    pagelist = pagelist[:xp_include_items]
                pages = ((Item.create(p),
                          Iri(scheme='wiki', authority='', path='/' + p))
                         for p in pagelist)

            included_elements = []
            for page, page_href in pages:
                if page_href in self.stack:
                    # Recursive include detected — emit an inline error.
                    # BUG FIX: this (and the heading below) appended to an
                    # undefined name 'div'; collect into included_elements.
                    w = ('<p xmlns="%s"><strong class="error">Recursive include of "%s" forbidden</strong></p>'
                         % (html.namespace, page.name))
                    included_elements.append(ET.XML(w))
                    continue
                # TODO: Is this correct? (ACL check by item name)
                if not flaskg.user.may.read(page.name):
                    continue
                if xp_include_heading is not None:
                    # Optional heading linking to the included page, placed
                    # immediately before the page's content.
                    attrib = {self.tag_href: page_href}
                    children = (xp_include_heading or page.name, )
                    elem_a = ET.Element(self.tag_a, attrib, children=children)
                    attrib = {self.tag_outline_level: xp_include_level or '1'}
                    elem_h = ET.Element(self.tag_h, attrib, children=(elem_a, ))
                    included_elements.append(elem_h)
                page_doc = page.internal_representation()
                # page_doc.tag = self.tag_div # XXX why did we have this?
                self.recurse(page_doc, page_href)
                # Wrap the page with the overlay, but only for "page"/"a"
                # roots. The href needs to be an absolute URI, without the
                # prefix "wiki://".
                if page_doc.tag.endswith("page") or page_doc.tag.endswith("a"):
                    page_doc = wrap_object_with_overlay(
                        page_doc, href=unicode(page_href.path))
                included_elements.append(page_doc)

            if len(included_elements) > 1:
                # Multiple results: wrap them in a div container.
                result = ET.Element(self.tag_div)
                result.extend(included_elements)
            elif included_elements:
                result = included_elements[0]
            else:
                result = None
            return result

        # Not an xi:include: recurse into children, replacing any child for
        # which recursion produced an expansion.
        for i in xrange(len(elem)):
            child = elem[i]
            if isinstance(child, ET.Node):
                ret = self.recurse(child, page_href)
                # BUG FIX: compare against None — an ET element with no
                # children is falsy, so 'if ret:' dropped valid (empty)
                # replacement elements.
                if ret is not None:
                    elem[i] = ret
    finally:
        self.stack.pop()