示例#1
0
def _wikify_one(pat):
    """
    Wikifies one link.

    ``pat`` is a regex match: group(2) is the link title and optional
    group(1) is a pipe-terminated page name override.  Returns an HTML
    anchor, or a list of anchors for "List:<label>" links.
    """
    title = pat.group(2)
    name = pat.group(1).rstrip('|') if pat.group(1) else title

    # A colon marks an interwiki-style prefix (e.g. "wiki:Page").
    if ':' in name:
        parts = name.split(':', 2)
        if name == title:
            # No explicit title given -- show the part after the prefix.
            title = parts[1]
        # "List:<label>" expands into links to every page with that label.
        if parts[0] == 'List':
            logging.debug('Inserting a list of pages labelled with "%s".' % parts[1])
            pages = model.WikiContent.gql('WHERE labels = :1', parts[1]).fetch(100)
            items = []
            for p in pagesort(pages):
                items.append('- <a class="int" href="%s">%s</a>' % (filters.pageurl(p.title), p.title))
            return u'\n'.join(items)
        # Known interwiki prefix -> external link with a quoted target.
        iwlink = get_settings(u'interwiki-' + parts[0])
        if iwlink:
            target = iwlink.replace('%s', urllib.quote(parts[1].encode('utf-8')))
            return '<a class="iw iw-%s" href="%s" target="_blank">%s</a>' % (parts[0], target, title)

    # Plain internal link.
    return '<a class="int" href="%s">%s</a>' % (filters.pageurl(name), title)
示例#2
0
def _wikify_one(pat):
    """
    Wikifies one link.

    ``pat`` is a regex match object: group(2) holds the link title and
    optional group(1) holds a pipe-terminated page name override.
    Returns an HTML anchor, or a newline-joined list of anchors when the
    link uses the special "List:<label>" prefix.
    """
    page_title = pat.group(2)
    if pat.group(1):
        # Explicit target given as "name|title"; strip the separator.
        page_name = pat.group(1).rstrip('|')
    else:
        page_name = page_title

    # interwiki: a colon marks a prefixed link, e.g. "wiki:Something".
    if ':' in page_name:
        parts = page_name.split(':', 2)
        if page_name == page_title:
            # No explicit title -- display the part after the prefix.
            page_title = parts[1]
        # "List:<label>" expands into links to all pages with that label.
        if parts[0] == 'List':
            logging.debug('Inserting a list of pages labelled with "%s".' %
                          parts[1])
            pages = model.WikiContent.gql('WHERE labels = :1',
                                          parts[1]).fetch(100)
            text = u'\n'.join([
                '- <a class="int" href="%s">%s</a>' %
                (filters.pageurl(p.title), p.title) for p in pagesort(pages)
            ])
            return text
        # Known interwiki prefix: substitute the quoted page name into
        # the configured URL template ("%s" placeholder).
        iwlink = get_settings(u'interwiki-' + parts[0])
        if iwlink:
            return '<a class="iw iw-%s" href="%s" target="_blank">%s</a>' % (
                parts[0],
                iwlink.replace('%s', urllib.quote(
                    parts[1].encode('utf-8'))), page_title)

    # Plain internal link.
    return '<a class="int" href="%s">%s</a>' % (filters.pageurl(page_name),
                                                page_title)
示例#3
0
 def _flush_cache(self, page_name):
     """
     Removes a page from both page and data cache.
     """
     url = filters.pageurl(page_name)
     # Both cache kinds are keyed by "<prefix><page url>".
     for prefix in ('data#', 'page#'):
         logging.debug('Cache DEL ' + prefix + url)
         memcache.delete(prefix + url)
示例#4
0
    def post(self):
        """
        Handles the page edit form: saves, updates or deletes a page.

        Reads the page name, new body and the optional "delete" flag from
        the request, archives the previous revision when the body changes,
        re-parses page options (redirect, visibility, labels, title) from
        the body, and flushes every affected cache entry before
        redirecting to the saved page.
        """
        page = self._load_page(
            urllib.unquote(str(self.request.get('name'))).decode('utf-8'))
        old_title = page.title
        old_labels = page.labels

        # Snapshot the current revision; it is stored only if the page is
        # deleted or its body actually changes.
        if page.is_saved():
            backup = model.WikiRevision(title=page.title,
                                        revision_body=page.body,
                                        author=page.author,
                                        created=page.updated)
        else:
            backup = None

        if self.request.get('delete'):
            if page.is_saved():
                page.delete()
                if backup:
                    backup.put()
        else:
            page.body = self.request.get('body')
            page.author = self.get_wiki_user(create=True)
            page.links = self._get_linked_page_names(page.body)
            page.updated = datetime.datetime.now()
            logging.debug('%s links to: %s' % (page.title, page.links))

            # Re-derive page metadata from the options embedded in the
            # body.  dict.get() replaces the deprecated has_key() with
            # identical semantics (missing key -> None / default).
            options = parse_page_options(unicode(page.body))
            page.redirect = options.get('redirect')
            if options.get('public') == 'yes':
                page.pread = True
            elif options.get('private') == 'yes':
                page.pread = False
            page.labels = options.get('labels', [])
            # We only need the header, so we don't use extensions here.
            # NOTE(review): assumes parse_page_options() always returns a
            # 'text' key -- confirm, otherwise this raises KeyError.
            r = re.search('<h1>(.*)</h1>', markdown.markdown(options['text']))
            if r:
                page.title = r.group(1).strip()

            # Archive the previous revision only when the body changed.
            if backup and backup.revision_body != page.body:
                backup.put()
            page.put()

        # The title may have changed; flush both old and new entries.
        self._flush_cache(page.title)
        self._flush_cache(old_title)

        # Flush labels cache for every label added, kept or removed.
        for label in list(set(old_labels + page.labels)):
            self._flush_cache(u'Label:' + label)

        self.redirect(filters.pageurl(page.title))
示例#5
0
 def _flush_cache(self, page_name):
     """
     Removes a page from both page and data cache.

     ``page_name`` is the page title; cache keys are derived from its
     URL with a "data#" / "page#" prefix.
     """
     page_url = filters.pageurl(page_name)
     logging.debug('Cache DEL data#' + page_url)
     memcache.delete('data#' + page_url)
     logging.debug('Cache DEL page#' + page_url)
     memcache.delete('page#' + page_url)
示例#6
0
    def post(self):
        """
        Handles the page edit form: saves, updates or deletes a page.

        Reads the page name, new body and the optional "delete" flag
        from the request, archives the previous revision when the body
        changes, re-parses page options from the body, and flushes the
        affected cache entries before redirecting to the saved page.
        """
        page = self._load_page(urllib.unquote(str(self.request.get('name'))).decode('utf-8'))
        old_title = page.title
        old_labels = page.labels

        # Save in the archive.  The snapshot is stored only if the page
        # is deleted or its body actually changes (see below).
        if page.is_saved():
            backup = model.WikiRevision(title=page.title, revision_body=page.body, author=page.author, created=page.updated)
        else:
            backup = None

        if self.request.get('delete'):
            if page.is_saved():
                page.delete()
                if backup:
                    backup.put()
        else:
            page.body = self.request.get('body')
            page.author = self.get_wiki_user(create=True)
            page.links = self._get_linked_page_names(page.body)
            page.updated = datetime.datetime.now()
            logging.debug('%s links to: %s' % (page.title, page.links))

            # Re-derive redirect, visibility and labels from the options
            # embedded in the page body.
            options = parse_page_options(unicode(page.body))
            if options.has_key('redirect'):
                page.redirect = options['redirect']
            else:
                page.redirect = None
            if options.has_key('public') and options['public'] == 'yes':
                page.pread = True
            elif options.has_key('private') and options['private'] == 'yes':
                page.pread = False
            if options.has_key('labels'):
                page.labels = options['labels']
            else:
                page.labels = []
            # We only need the header, so we don't use extensions here.
            r = re.search('<h1>(.*)</h1>', markdown.markdown(options['text']))
            if r:
                page.title = r.group(1).strip()

            # Archive the previous revision only when the body changed.
            if backup and backup.revision_body != page.body:
                backup.put()
            page.put()

        # The title may have changed; flush both new and old entries.
        self._flush_cache(page.title)
        self._flush_cache(old_title)

        # Flush labels cache
        for label in list(set(old_labels + page.labels)):
            self._flush_cache(u'Label:' + label)

        self.redirect(filters.pageurl(page.title))
示例#7
0
    def get(self):
        """
        Renders sitemap.xml with up to 1000 most recently updated pages.

        A page is listed when the wiki allows open reading or the page
        itself is marked public (``pread``).
        """
        # Collect fragments and join once; repeated ``+=`` on a string
        # inside the loop is quadratic in the worst case.
        parts = ["<?xml version=\"1.0\" encoding=\"utf-8\"?>\n"]
        parts.append("<urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">\n")

        show_all = get_settings('open-reading') == 'yes'
        host = self.request.environ['HTTP_HOST']

        for page in model.WikiContent.all().order('-updated').fetch(1000):
            if show_all or page.pread:
                # NOTE(review): the URL is not XML-escaped here; a title
                # producing '&' in its URL would yield invalid XML --
                # confirm filters.pageurl() percent-quotes such characters.
                line = "<url><loc>http://%s%s</loc>" % (host, filters.pageurl(page.title))
                if page.updated:
                    line += "<lastmod>%s</lastmod>" % (page.updated.strftime('%Y-%m-%d'))
                line += "</url>\n"
                parts.append(line)
        parts.append("</urlset>\n")

        self.response.headers['Content-Type'] = 'text/xml'
        self.response.out.write("".join(parts))
示例#8
0
    def get(self):
        """
        Renders sitemap.xml with up to 1000 most recently updated pages.

        A page is listed when the wiki allows open reading or the page
        itself is marked public (``pread``).
        """
        content = "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n"
        content += "<urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">\n"

        show_all = get_settings('open-reading') == 'yes'
        host = self.request.environ['HTTP_HOST']

        # Newest pages first; cap at 1000 entries.
        for page in model.WikiContent.all().order('-updated').fetch(1000):
            if show_all or page.pread:
                line = "<url><loc>http://%s%s</loc>" % (
                    host, filters.pageurl(page.title))
                if page.updated:
                    # Sitemap lastmod uses the W3C date format YYYY-MM-DD.
                    line += "<lastmod>%s</lastmod>" % (
                        page.updated.strftime('%Y-%m-%d'))
                line += "</url>\n"
                content += line
        content += "</urlset>\n"

        self.response.headers['Content-Type'] = 'text/xml'
        self.response.out.write(content)
示例#9
0
 def getStartPage(self):
     """Return the URL of the wiki's start page ("Welcome" by default)."""
     start = get_settings('start_page', 'Welcome')
     return filters.pageurl(start)
示例#10
0
 def getStartPage(self):
     # URL of the wiki's entry page; configurable via the "start_page"
     # setting, falling back to "Welcome".
     return filters.pageurl(get_settings('start_page', 'Welcome'))