def xmlrpc_listLinks(self, pagename):
    """ list links for a given page

    @param pagename: the page name
    @rtype: list
    @return: links of the page, structs, with the following elements
        * name (string) : The page name or URL the link is to, UTF-8 encoding.
        * type (int) : The link type. Zero (0) for internal Wiki link,
          one (1) for external link (URL - image link, whatever).
    """
    pagename = self._instr(pagename)

    # Refuse before touching the page if the user lacks read rights.
    if not self.request.user.may.read(pagename):
        return self.notAllowedFault()

    page = Page(self.request, pagename)
    if not page.exists():
        return self.noSuchPageFault()

    # getPageLinks yields wiki-internal links only, hence type 0 throughout.
    return [{'name': self._outstr(link), 'type': 0}
            for link in page.getPageLinks(self.request)]
def _getPages(request, pagename):
    """ Return a (filtered) list of pages names. """
    return Page(request, pagename).getPageLinks(request)
def xmlrpc_listLinks(self, pagename):
    """ list links for a given page

    @param pagename: the page name
    @rtype: list
    @return: links of the page, structs, with the following elements
        * name (string) : The page name or URL the link is to, UTF-8 encoding.
        * type (int) : The link type. Zero (0) for internal Wiki link,
          one (1) for external link (URL - image link, whatever).
    """
    pagename = self._instr(pagename)

    # Permission and existence guard clauses come first.
    if not self.request.user.may.read(pagename):
        return self.notAllowedFault()
    page = Page(self.request, pagename)
    if not page.exists():
        return self.noSuchPageFault()

    # Every link reported here is a wiki-internal one, so type is always 0.
    make_entry = lambda target: {'name': self._outstr(target), 'type': 0}
    return [make_entry(target) for target in page.getPageLinks(self.request)]
def mainloop(self):
    """ Populate the pagelinks cache entry of every existing page.

    getPageLinks() is invoked purely for its side effect: parsing the
    page stores its link data in the cache, so the result is discarded.
    """
    self.init_request()
    request = self.request
    # All existing pages, unfiltered by any particular user's ACLs.
    pages = request.rootpage.getPageList(user='', exists=1)
    for pagename in pages:
        page = Page(request, pagename)
        request.page = page
        # Call for the cache side effect only; the returned links are unused.
        page.getPageLinks(request)
def _index_page_rev(self, request, connection, pagename, revno, mode='update'): """ Index a page revision. @param request: request suitable for indexing @param connection: the Indexer connection object @param pagename: the page name @param revno: page revision number (int) @param mode: 'add' = just add, no checks 'update' = check if already in index and update if needed (mtime) """ page = Page(request, pagename, rev=revno) request.page = page # XXX for what is this needed? wikiname = request.cfg.interwikiname or u"Self" revision = str(page.get_real_rev()) itemid = "%s:%s:%s" % (wikiname, pagename, revision) mtime = page.mtime_usecs() doc = self._get_document(connection, itemid, mtime, mode) logging.debug("%s %s %r" % (pagename, revision, doc)) if doc: mimetype = 'text/%s' % page.pi['format'] # XXX improve this fields = {} fields['wikiname'] = wikiname fields['pagename'] = pagename fields['attachment'] = '' # this is a real page, not an attachment fields['mtime'] = str(mtime) fields['revision'] = revision fields['title'] = pagename fields['content'] = page.get_raw_body() fields['lang'], fields['stem_lang'] = self._get_languages(page) fields['author'] = page.edit_info().get('editor', '?') multivalued_fields = {} multivalued_fields['mimetype'] = [ mt for mt in [mimetype] + mimetype.split('/') ] multivalued_fields['domain'] = self._get_domains(page) multivalued_fields['linkto'] = page.getPageLinks(request) multivalued_fields['category'] = self._get_categories(page) self._add_fields_to_document(request, doc, fields, multivalued_fields) try: connection.replace(doc) except xappy.IndexerError, err: logging.warning("IndexerError at %r %r %r (%s)" % (wikiname, pagename, revision, str(err)))
def _index_page_rev(self, request, connection, pagename, revno, mode='update'): """ Index a page revision. @param request: request suitable for indexing @param connection: the Indexer connection object @param pagename: the page name @param revno: page revision number (int) @param mode: 'add' = just add, no checks 'update' = check if already in index and update if needed (mtime) """ page = Page(request, pagename, rev=revno) request.page = page # XXX for what is this needed? wikiname = request.cfg.interwikiname or u"Self" revision = str(page.get_real_rev()) itemid = "%s:%s:%s" % (wikiname, pagename, revision) mtime = page.mtime_usecs() doc = self._get_document(connection, itemid, mtime, mode) logging.debug("%s %s %r" % (pagename, revision, doc)) if doc: mimetype = 'text/%s' % page.pi['format'] # XXX improve this fields = {} fields['wikiname'] = wikiname fields['pagename'] = pagename fields['attachment'] = '' # this is a real page, not an attachment fields['mtime'] = str(mtime) fields['revision'] = revision fields['title'] = pagename fields['content'] = page.get_raw_body() fields['lang'], fields['stem_lang'] = self._get_languages(page) fields['author'] = page.edit_info().get('editor', '?') multivalued_fields = {} multivalued_fields['mimetype'] = [mt for mt in [mimetype] + mimetype.split('/')] multivalued_fields['domain'] = self._get_domains(page) multivalued_fields['linkto'] = page.getPageLinks(request) multivalued_fields['category'] = self._get_categories(page) self._add_fields_to_document(request, doc, fields, multivalued_fields) try: connection.replace(doc) except xappy.IndexerError, err: logging.warning("IndexerError at %r %r %r (%s)" % ( wikiname, pagename, revision, str(err)))
def testGetPageLinks(self):
    """ The default FrontPage must contain a link to WikiSandBox. """
    links = Page(self.request, u"FrontPage").getPageLinks(self.request)
    assert u'WikiSandBox' in links